VirtualBox
source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsTwoByte0f.cpp.h @ 67007

Last change on this file since 67007 was 67007, checked in by vboxsync, 8 years ago:
IEM: Tested and adjusted movq Pq,Qq (0x0f 0x6f).
/* $Id: IEMAllInstructionsTwoByte0f.cpp.h 67007 2017-05-22 11:52:13Z vboxsync $ */
/** @file
 * IEM - Instruction Decoding and Emulation.
 *
 * @remarks IEMAllInstructionsVexMap1.cpp.h is a VEX mirror of this file.
 *          Any update here is likely needed in that file too.
 */

/*
 * Copyright (C) 2011-2017 Oracle Corporation
 *
 * This file is part of VirtualBox Open Source Edition (OSE), as
 * available from http://www.virtualbox.org. This file is free software;
 * you can redistribute it and/or modify it under the terms of the GNU
 * General Public License (GPL) as published by the Free Software
 * Foundation, in version 2 as it comes in the "COPYING" file of the
 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
 */


/** @name Two byte opcodes (first byte 0x0f).
 *
 * @{
 */

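/*
 * Decoding notes: the IEM_MC_* statements below are IEM's statement-level
 * "microcode" macros, and bRm holds the raw ModR/M byte in the usual x86
 * layout (mod in bits 7:6, reg in bits 5:3, rm in bits 2:0).  The recurring
 * test
 *      (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)
 * asks whether the register form (mod == 3) was encoded.  uRexReg and uRexB
 * carry the REX.R/REX.B prefix bits pre-shifted to bit 3, so ORing them onto
 * the 3-bit reg/rm fields yields the full 4-bit register index in 64-bit
 * mode.
 */
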
/** Opcode 0x0f 0x00 /0. */
FNIEMOPRM_DEF(iemOp_Grp6_sldt)
{
    IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_LDTR_READS, SVM_EXIT_LDTR_READ, 0, 0);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Ldtr);
                IEM_MC_FETCH_LDTR_U16(u16Ldtr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Ldtr);
                IEM_MC_FETCH_LDTR_U32(u32Ldtr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Ldtr);
                IEM_MC_FETCH_LDTR_U64(u64Ldtr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Ldtr);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_LDTR_READS, SVM_EXIT_LDTR_READ, 0, 0);
        IEM_MC_FETCH_LDTR_U16(u16Ldtr);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Ldtr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /1. */
FNIEMOPRM_DEF(iemOp_Grp6_str)
{
    IEMOP_MNEMONIC(str, "str Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_TR_READS, SVM_EXIT_TR_READ, 0, 0);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Tr);
                IEM_MC_FETCH_TR_U16(u16Tr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Tr);
                IEM_MC_FETCH_TR_U32(u32Tr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tr);
                IEM_MC_FETCH_TR_U64(u64Tr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Tr);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_TR_READS, SVM_EXIT_TR_READ, 0, 0);
        IEM_MC_FETCH_TR_U16(u16Tr);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /2. */
FNIEMOPRM_DEF(iemOp_Grp6_lldt)
{
    IEMOP_MNEMONIC(lldt, "lldt Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /3. */
FNIEMOPRM_DEF(iemOp_Grp6_ltr)
{
    IEMOP_MNEMONIC(ltr, "ltr Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Common worker for opcode 0x0f 0x00 /4 (verr) and /5 (verw). */
FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
{
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /4. */
FNIEMOPRM_DEF(iemOp_Grp6_verr)
{
    IEMOP_MNEMONIC(verr, "verr Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
}


/** Opcode 0x0f 0x00 /5. */
FNIEMOPRM_DEF(iemOp_Grp6_verw)
{
    IEMOP_MNEMONIC(verw, "verw Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
}


/**
 * Group 6 jump table.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
{
    iemOp_Grp6_sldt,
    iemOp_Grp6_str,
    iemOp_Grp6_lldt,
    iemOp_Grp6_ltr,
    iemOp_Grp6_verr,
    iemOp_Grp6_verw,
    iemOp_InvalidWithRM,
    iemOp_InvalidWithRM
};

/** Opcode 0x0f 0x00. */
FNIEMOP_DEF(iemOp_Grp6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    return FNIEMOP_CALL_1(g_apfnGroup6[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);
}
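
/*
 * Dispatch example: for the byte sequence 0x0f 0x00 0xd0 the ModR/M byte
 * is 0xd0 (mod=3, reg=2, rm=0), so g_apfnGroup6[2] is taken and the
 * instruction decodes as 'lldt ax'.
 */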


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sgdt, "sgdt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 0xc1. */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 0xc2. */
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 0xc3. */
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 0xc4. */
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sidt, "sidt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 0xc8. */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    IEMOP_MNEMONIC(monitor, "monitor");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
}


/** Opcode 0x0f 0x01 0xc9. */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
}


/** Opcode 0x0f 0x01 /2. */
FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lgdt, "lgdt");
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 0xd0. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    IEMOP_MNEMONIC(xgetbv, "xgetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xgetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 0xd1. */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    IEMOP_MNEMONIC(xsetbv, "xsetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xsetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /3. */
FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lidt, "lidt");
    IEMMODE enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
                         ? IEMMODE_64BIT
                         : pVCpu->iem.s.enmEffOpSize;
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}


#ifdef VBOX_WITH_NESTED_HWVIRT
/** Opcode 0x0f 0x01 0xd8. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmrun)
{
    IEMOP_MNEMONIC(vmrun, "vmrun");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmrun);
}

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmmcall)
{
    IEMOP_MNEMONIC(vmmcall, "vmmcall");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmmcall);
}


/** Opcode 0x0f 0x01 0xda. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmload)
{
    IEMOP_MNEMONIC(vmload, "vmload");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmload);
}


/** Opcode 0x0f 0x01 0xdb. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmsave)
{
    IEMOP_MNEMONIC(vmsave, "vmsave");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmsave);
}


/** Opcode 0x0f 0x01 0xdc. */
FNIEMOP_DEF(iemOp_Grp7_Amd_stgi)
{
    IEMOP_MNEMONIC(stgi, "stgi");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stgi);
}


/** Opcode 0x0f 0x01 0xdd. */
FNIEMOP_DEF(iemOp_Grp7_Amd_clgi)
{
    IEMOP_MNEMONIC(clgi, "clgi");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clgi);
}


/** Opcode 0x0f 0x01 0xdf. */
FNIEMOP_DEF(iemOp_Grp7_Amd_invlpga)
{
    IEMOP_MNEMONIC(invlpga, "invlpga");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_invlpga);
}


/** Opcode 0x0f 0x01 0xde. */
FNIEMOP_DEF(iemOp_Grp7_Amd_skinit)
{
    IEMOP_MNEMONIC(skinit, "skinit");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_skinit);
}
#else
/** Opcode 0x0f 0x01 0xd8. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmmcall);

/** Opcode 0x0f 0x01 0xda. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);

/** Opcode 0x0f 0x01 0xdb. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);

/** Opcode 0x0f 0x01 0xdc. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);

/** Opcode 0x0f 0x01 0xdd. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);

/** Opcode 0x0f 0x01 0xdf. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);

/** Opcode 0x0f 0x01 0xde. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);
#endif /* VBOX_WITH_NESTED_HWVIRT */
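
/*
 * When VBOX_WITH_NESTED_HWVIRT is not defined, the FNIEMOP_UD_STUB variants
 * above decode the whole AMD SVM sub-range to #UD, matching what hardware
 * without SVM support raises for these opcodes.
 */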

/** Opcode 0x0f 0x01 /4. */
FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(smsw, "smsw");
    IEMOP_HLP_MIN_286();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEMOP_HLP_SVM_READ_CR_INTERCEPT(pVCpu, /*cr*/ 0, 0 /* uExitInfo1 */, 0 /* uExitInfo2 */);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_FETCH_CR0_U16(u16Tmp);
                if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
                { /* likely */ }
                else if (IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_386)
                    IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
                else
                    IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_FETCH_CR0_U32(u32Tmp);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_FETCH_CR0_U64(u64Tmp);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Ignore operand size here, memory refs are always 16-bit. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEMOP_HLP_SVM_READ_CR_INTERCEPT(pVCpu, /*cr*/ 0, 0 /* uExitInfo1 */, 0 /* uExitInfo2 */);
        IEM_MC_FETCH_CR0_U16(u16Tmp);
        if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
        { /* likely */ }
        else if (pVCpu->iem.s.uTargetCpu >= IEMTARGETCPU_386)
            IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
        else
            IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }
}
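
/*
 * The OR masks above emulate pre-486 behaviour when storing the machine
 * status word: the 286 path forces bits 15:4 of the result to one (0xfff0),
 * the 386 path forces only bits 15:5 (0xffe0, leaving CR0.ET visible), and
 * 486 or later targets store the low word of CR0 unmodified.
 */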


/** Opcode 0x0f 0x01 /6. */
FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
{
    /* The operand size is effectively ignored, all is 16-bit and only the
       lower 4 bits (PE, MP, EM and TS) are used. */
    IEMOP_MNEMONIC(lmsw, "lmsw");
    IEMOP_HLP_MIN_286();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
{
    IEMOP_MNEMONIC(invlpg, "invlpg");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(1, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 0xf8. */
FNIEMOP_DEF(iemOp_Grp7_swapgs)
{
    IEMOP_MNEMONIC(swapgs, "swapgs");
    IEMOP_HLP_ONLY_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
}


/** Opcode 0x0f 0x01 0xf9. */
FNIEMOP_DEF(iemOp_Grp7_rdtscp)
{
    IEMOP_MNEMONIC(rdtscp, "rdtscp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /** @todo SVM intercept removal from here. */
    IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_RDTSCP, SVM_EXIT_RDTSCP, 0, 0);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtscp);
}


/**
 * Group 7 jump table, memory variant.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup7Mem[8] =
{
    iemOp_Grp7_sgdt,
    iemOp_Grp7_sidt,
    iemOp_Grp7_lgdt,
    iemOp_Grp7_lidt,
    iemOp_Grp7_smsw,
    iemOp_InvalidWithRM,
    iemOp_Grp7_lmsw,
    iemOp_Grp7_invlpg
};


/** Opcode 0x0f 0x01. */
FNIEMOP_DEF(iemOp_Grp7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(g_apfnGroup7Mem[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);

    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 1:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 2:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 3:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
                case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
                case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
                case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }

        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);

        case 5:
            return IEMOP_RAISE_INVALID_OPCODE();

        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);

        case 7:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
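
/*
 * Dispatch example: 0x0f 0x01 0xd0 has mod=3, reg=2, rm=0 and thus hits
 * case 2 / rm 0 above, decoding as xgetbv, while 0x0f 0x01 0x10 (mod=0,
 * reg=2) takes the memory path through g_apfnGroup7Mem[2] and decodes as
 * lgdt [rax] in 64-bit mode.
 */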

/** Common worker for opcodes 0x0f 0x02 (lar) and 0x0f 0x03 (lsl). */
FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
{
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
/** @todo testcase: make sure it's a 16-bit read. */

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}


/** Opcode 0x0f 0x02. */
FNIEMOP_DEF(iemOp_lar_Gv_Ew)
{
    IEMOP_MNEMONIC(lar, "lar Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
}


/** Opcode 0x0f 0x03. */
FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
{
    IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
}


/** Opcode 0x0f 0x05. */
FNIEMOP_DEF(iemOp_syscall)
{
    IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
}


/** Opcode 0x0f 0x06. */
FNIEMOP_DEF(iemOp_clts)
{
    IEMOP_MNEMONIC(clts, "clts");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
}


/** Opcode 0x0f 0x07. */
FNIEMOP_DEF(iemOp_sysret)
{
    IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
}


/** Opcode 0x0f 0x08. */
FNIEMOP_DEF(iemOp_invd)
{
    IEMOP_MNEMONIC(invd, "invd");
#ifdef VBOX_WITH_NESTED_HWVIRT
    IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO();
    IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_INVD, SVM_EXIT_INVD, 0, 0);
#else
    RT_NOREF_PV(pVCpu);
#endif
    /** @todo implement invd for the regular case (above only handles nested SVM
     *        exits). */
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}


/** Opcode 0x0f 0x09. */
FNIEMOP_DEF(iemOp_wbinvd)
{
    IEMOP_MNEMONIC(wbinvd, "wbinvd");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO();
    IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_WBINVD, SVM_EXIT_WBINVD, 0, 0);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS; /* ignore for now */
}


/** Opcode 0x0f 0x0b. */
FNIEMOP_DEF(iemOp_ud2)
{
    IEMOP_MNEMONIC(ud2, "ud2");
    return IEMOP_RAISE_INVALID_OPCODE();
}

/** Opcode 0x0f 0x0d. */
FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
{
    /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
    {
        IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 2: /* Aliased to /0 for the time being. */
        case 4: /* Aliased to /0 for the time being. */
        case 5: /* Aliased to /0 for the time being. */
        case 6: /* Aliased to /0 for the time being. */
        case 7: /* Aliased to /0 for the time being. */
        case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
        case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
        case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Currently a NOP. */
    NOREF(GCPtrEffSrc);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x0e. */
FNIEMOP_STUB(iemOp_femms);


/** Opcode 0x0f 0x0f. */
FNIEMOP_DEF(iemOp_3Dnow)
{
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
    {
        IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

#ifdef IEM_WITH_3DNOW
    /* This is pretty sparse, use switch instead of table. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL_1(iemOp_3DNowDispatcher, b);
#else
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif
}


/**
 * @opcode 0x10
 * @oppfx none
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_movups_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, MOVUPS, movups, Vps_WO, Wps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                              (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
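
/*
 * A note on the doxygen tags: the @optest lines above and below state
 * input/output expectations for the instruction ('op1=1 op2=2 -> op1=2'
 * for a plain move); they appear to drive IEM's generated testcases in
 * addition to the documentation.
 */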


/**
 * @opcode 0x10
 * @oppfx 0x66
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movupd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, MOVUPD, movupd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                              (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x10
 * @oppfx 0xf3
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_movss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, MOVSS, movss, VssZx_WO, Wss, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint32_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U32(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_XREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x10
 * @oppfx 0xf2
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movsd_Vsd_Wsd)
{
    IEMOP_MNEMONIC2(RM, MOVSD, movsd, VsdZx_WO, Wsd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x11
 * @oppfx none
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movups_Wps_Vps)
{
    IEMOP_MNEMONIC2(MR, MOVUPS, movups, Wps_WO, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x11
 * @oppfx 0x66
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movupd_Wpd_Vpd)
{
    IEMOP_MNEMONIC2(MR, MOVUPD, movupd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x11
 * @oppfx 0xf3
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_movss_Wss_Vss)
{
    IEMOP_MNEMONIC2(MR, MOVSS, movss, Wss_WO, Vss, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint32_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U32(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_XREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U32(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x11
 * @oppfx 0xf2
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movsd_Wsd_Vsd)
{
    IEMOP_MNEMONIC2(MR, MOVSD, movsd, Wsd_WO, Vsd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


FNIEMOP_DEF(iemOp_movlps_Vq_Mq__movhlps)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /**
         * @opcode 0x12
         * @opcodesub 11 mr/reg
         * @oppfx none
         * @opcpuid sse
         * @opgroup og_sse_simdfp_datamove
         * @opxcpttype 5
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(RM_REG, MOVHLPS, movhlps, Vq_WO, UqHi, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);

        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_HI_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode 0x12
         * @opcodesub !11 mr/reg
         * @oppfx none
         * @opcpuid sse
         * @opgroup og_sse_simdfp_datamove
         * @opxcpttype 5
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-42 -> op1=-42
         * @opfunction iemOp_movlps_Vq_Mq__movhlps
         */
        IEMOP_MNEMONIC2(RM_MEM, MOVLPS, movlps, Vq_WO, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
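
/*
 * Summary of the split above: with mod=3 this is movhlps (high qword of
 * the source XMM register copied to the low qword of the destination),
 * otherwise it is movlps (low qword loaded from memory); the destination's
 * high qword is left untouched in both cases.
 */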


/**
 * @opcode 0x12
 * @opcodesub !11 mr/reg
 * @oppfx 0x66
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movlpd_Vq_Mq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC2(RM_MEM, MOVLPD, movlpd, Vq_WO, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic ud660f12m3
     * @opcode 0x12
     * @opcodesub 11 mr/reg
     * @oppfx 0x66
     * @opunused immediate
     * @opcpuid sse
     * @optest ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}


/**
 * @opcode 0x12
 * @oppfx 0xf3
 * @opcpuid sse3
 * @opgroup og_sse3_pcksclr_datamove
 * @opxcpttype 4
 * @optest op1=-1 op2=0xdddddddd00000002eeeeeeee00000001 ->
 *         op1=0x00000002000000020000000100000001
 */
FNIEMOP_DEF(iemOp_movsldup_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, MOVSLDUP, movsldup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);

        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_REF_XREG_U128_CONST(puSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
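
/*
 * Reading the @optest above: movsldup duplicates the even dwords, giving
 * [s0, s0, s2, s2] from low to high; that is how the source value
 * 0xdddddddd00000002eeeeeeee00000001 becomes
 * 0x00000002000000020000000100000001.
 */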
1583
1584
1585/**
1586 * @opcode 0x12
1587 * @oppfx 0xf2
1588 * @opcpuid sse3
1589 * @opgroup og_sse3_pcksclr_datamove
1590 * @opxcpttype 5
1591 * @optest op1=-1 op2=0xddddddddeeeeeeee2222222211111111 ->
1592 * op1=0x22222222111111112222222211111111
1593 */
1594FNIEMOP_DEF(iemOp_movddup_Vdq_Wdq)
1595{
1596 IEMOP_MNEMONIC2(RM, MOVDDUP, movddup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1597 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1598 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1599 {
1600 /*
1601 * Register, register.
1602 */
1603 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1604 IEM_MC_BEGIN(2, 0);
1605 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1606 IEM_MC_ARG(uint64_t, uSrc, 1);
1607
1608 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1609 IEM_MC_PREPARE_SSE_USAGE();
1610
1611 IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1612 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1613 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movddup, puDst, uSrc);
1614
1615 IEM_MC_ADVANCE_RIP();
1616 IEM_MC_END();
1617 }
1618 else
1619 {
1620 /*
1621 * Register, memory.
1622 */
1623 IEM_MC_BEGIN(2, 2);
1624 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1625 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1626 IEM_MC_ARG(uint64_t, uSrc, 1);
1627
1628 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1629 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1630 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1631 IEM_MC_PREPARE_SSE_USAGE();
1632
1633 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1634 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1635 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movddup, puDst, uSrc);
1636
1637 IEM_MC_ADVANCE_RIP();
1638 IEM_MC_END();
1639 }
1640 return VINF_SUCCESS;
1641}
1642
1643
1644/**
1645 * @opcode 0x13
1646 * @opcodesub !11 mr/reg
1647 * @oppfx none
1648 * @opcpuid sse
1649 * @opgroup og_sse_simdfp_datamove
1650 * @opxcpttype 5
1651 * @optest op1=1 op2=2 -> op1=2
1652 * @optest op1=0 op2=-42 -> op1=-42
1653 */
1654FNIEMOP_DEF(iemOp_movlps_Mq_Vq)
1655{
1656 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1657 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1658 {
1659 IEMOP_MNEMONIC2(MR_MEM, MOVLPS, movlps, Mq_WO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1660
1661 IEM_MC_BEGIN(0, 2);
1662 IEM_MC_LOCAL(uint64_t, uSrc);
1663 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1664
1665 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1666 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1667 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1668 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1669
1670 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1671 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1672
1673 IEM_MC_ADVANCE_RIP();
1674 IEM_MC_END();
1675 return VINF_SUCCESS;
1676 }
1677
1678 /**
1679 * @opdone
1680 * @opmnemonic ud0f13m3
1681 * @opcode 0x13
1682 * @opcodesub 11 mr/reg
1683 * @oppfx none
1684 * @opunused immediate
1685 * @opcpuid sse
1686 * @optest ->
1687 */
1688 return IEMOP_RAISE_INVALID_OPCODE();
1689}
1690
1691
1692/**
1693 * @opcode 0x13
1694 * @opcodesub !11 mr/reg
1695 * @oppfx 0x66
1696 * @opcpuid sse2
1697 * @opgroup og_sse2_pcksclr_datamove
1698 * @opxcpttype 5
1699 * @optest op1=1 op2=2 -> op1=2
1700 * @optest op1=0 op2=-42 -> op1=-42
1701 */
1702FNIEMOP_DEF(iemOp_movlpd_Mq_Vq)
1703{
1704 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1705 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1706 {
1707 IEMOP_MNEMONIC2(MR_MEM, MOVLPD, movlpd, Mq_WO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1708 IEM_MC_BEGIN(0, 2);
1709 IEM_MC_LOCAL(uint64_t, uSrc);
1710 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1711
1712 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1713 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1714 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1715 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1716
1717 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1718 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1719
1720 IEM_MC_ADVANCE_RIP();
1721 IEM_MC_END();
1722 return VINF_SUCCESS;
1723 }
1724
1725 /**
1726 * @opdone
1727 * @opmnemonic ud660f13m3
1728 * @opcode 0x13
1729 * @opcodesub 11 mr/reg
1730 * @oppfx 0x66
1731 * @opunused immediate
1732 * @opcpuid sse
1733 * @optest ->
1734 */
1735 return IEMOP_RAISE_INVALID_OPCODE();
1736}
1737
1738
1739/**
1740 * @opmnemonic udf30f13
1741 * @opcode 0x13
1742 * @oppfx 0xf3
1743 * @opunused intel-modrm
1744 * @opcpuid sse
1745 * @optest ->
1746 * @opdone
1747 */
1748
1749/**
1750 * @opmnemonic udf20f13
1751 * @opcode 0x13
1752 * @oppfx 0xf2
1753 * @opunused intel-modrm
1754 * @opcpuid sse
1755 * @optest ->
1756 * @opdone
1757 */
1758
1759/** Opcode 0x0f 0x14 - unpcklps Vx, Wx*/
1760FNIEMOP_STUB(iemOp_unpcklps_Vx_Wx);
1761/** Opcode 0x66 0x0f 0x14 - unpcklpd Vx, Wx */
1762FNIEMOP_STUB(iemOp_unpcklpd_Vx_Wx);
1763
1764/**
1765 * @opdone
1766 * @opmnemonic udf30f14
1767 * @opcode 0x14
1768 * @oppfx 0xf3
1769 * @opunused intel-modrm
1770 * @opcpuid sse
1771 * @optest ->
1772 * @opdone
1773 */
1774
1775/**
1776 * @opmnemonic udf20f14
1777 * @opcode 0x14
1778 * @oppfx 0xf2
1779 * @opunused intel-modrm
1780 * @opcpuid sse
1781 * @optest ->
1782 * @opdone
1783 */
1784
1785/** Opcode 0x0f 0x15 - unpckhps Vx, Wx */
1786FNIEMOP_STUB(iemOp_unpckhps_Vx_Wx);
1787/** Opcode 0x66 0x0f 0x15 - unpckhpd Vx, Wx */
1788FNIEMOP_STUB(iemOp_unpckhpd_Vx_Wx);
1789/* Opcode 0xf3 0x0f 0x15 - invalid */
1790/* Opcode 0xf2 0x0f 0x15 - invalid */
1791
1792/**
1793 * @opdone
1794 * @opmnemonic udf30f15
1795 * @opcode 0x15
1796 * @oppfx 0xf3
1797 * @opunused intel-modrm
1798 * @opcpuid sse
1799 * @optest ->
1800 * @opdone
1801 */
1802
1803/**
1804 * @opmnemonic udf20f15
1805 * @opcode 0x15
1806 * @oppfx 0xf2
1807 * @opunused intel-modrm
1808 * @opcpuid sse
1809 * @optest ->
1810 * @opdone
1811 */
1812
1813FNIEMOP_DEF(iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq)
1814{
1815 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1816 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1817 {
1818 /**
1819 * @opcode 0x16
1820 * @opcodesub 11 mr/reg
1821 * @oppfx none
1822 * @opcpuid sse
1823 * @opgroup og_sse_simdfp_datamove
1824 * @opxcpttype 5
1825 * @optest op1=1 op2=2 -> op1=2
1826 * @optest op1=0 op2=-42 -> op1=-42
1827 */
1828 IEMOP_MNEMONIC2(RM_REG, MOVLHPS, movlhps, VqHi_WO, Uq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1829
1830 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1831 IEM_MC_BEGIN(0, 1);
1832 IEM_MC_LOCAL(uint64_t, uSrc);
1833
1834 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1835 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1836 IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1837 IEM_MC_STORE_XREG_HI_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1838
1839 IEM_MC_ADVANCE_RIP();
1840 IEM_MC_END();
1841 }
1842 else
1843 {
1844 /**
1845 * @opdone
1846 * @opcode 0x16
1847 * @opcodesub !11 mr/reg
1848 * @oppfx none
1849 * @opcpuid sse
1850 * @opgroup og_sse_simdfp_datamove
1851 * @opxcpttype 5
1852 * @optest op1=1 op2=2 -> op1=2
1853 * @optest op1=0 op2=-42 -> op1=-42
1854 * @opfunction iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq
1855 */
1856 IEMOP_MNEMONIC2(RM_MEM, MOVHPS, movhps, VqHi_WO, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1857
1858 IEM_MC_BEGIN(0, 2);
1859 IEM_MC_LOCAL(uint64_t, uSrc);
1860 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1861
1862 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1863 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1864 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1865 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1866
1867 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1868 IEM_MC_STORE_XREG_HI_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1869
1870 IEM_MC_ADVANCE_RIP();
1871 IEM_MC_END();
1872 }
1873 return VINF_SUCCESS;
1874}
1875
1876
1877/**
1878 * @opcode 0x16
1879 * @opcodesub !11 mr/reg
1880 * @oppfx 0x66
1881 * @opcpuid sse2
1882 * @opgroup og_sse2_pcksclr_datamove
1883 * @opxcpttype 5
1884 * @optest op1=1 op2=2 -> op1=2
1885 * @optest op1=0 op2=-42 -> op1=-42
1886 */
1887FNIEMOP_DEF(iemOp_movhpd_Vdq_Mq)
1888{
1889 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1890 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1891 {
1892 IEMOP_MNEMONIC2(RM_MEM, MOVHPD, movhpd, VqHi_WO, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1893 IEM_MC_BEGIN(0, 2);
1894 IEM_MC_LOCAL(uint64_t, uSrc);
1895 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1896
1897 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1898 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1899 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1900 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1901
1902 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1903 IEM_MC_STORE_XREG_HI_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1904
1905 IEM_MC_ADVANCE_RIP();
1906 IEM_MC_END();
1907 return VINF_SUCCESS;
1908 }
1909
1910 /**
1911 * @opdone
1912 * @opmnemonic ud660f16m3
1913 * @opcode 0x16
1914 * @opcodesub 11 mr/reg
1915 * @oppfx 0x66
1916 * @opunused immediate
1917 * @opcpuid sse
1918 * @optest ->
1919 */
1920 return IEMOP_RAISE_INVALID_OPCODE();
1921}
1922
1923
1924/**
1925 * @opcode 0x16
1926 * @oppfx 0xf3
1927 * @opcpuid sse3
1928 * @opgroup og_sse3_pcksclr_datamove
1929 * @opxcpttype 4
1930 * @optest op1=-1 op2=0x00000002dddddddd00000001eeeeeeee ->
1931 * op1=0x00000002000000020000000100000001
1932 */
1933FNIEMOP_DEF(iemOp_movshdup_Vdq_Wdq)
1934{
1935 IEMOP_MNEMONIC2(RM, MOVSHDUP, movshdup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1936 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1937 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1938 {
1939 /*
1940 * Register, register.
1941 */
1942 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1943 IEM_MC_BEGIN(2, 0);
1944 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1945 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
1946
1947 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1948 IEM_MC_PREPARE_SSE_USAGE();
1949
1950 IEM_MC_REF_XREG_U128_CONST(puSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1951 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1952 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movshdup, puDst, puSrc);
1953
1954 IEM_MC_ADVANCE_RIP();
1955 IEM_MC_END();
1956 }
1957 else
1958 {
1959 /*
1960 * Register, memory.
1961 */
1962 IEM_MC_BEGIN(2, 2);
1963 IEM_MC_LOCAL(RTUINT128U, uSrc);
1964 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1965 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1966 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
1967
1968 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1969 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1970 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1971 IEM_MC_PREPARE_SSE_USAGE();
1972
1973 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1974 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1975 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movshdup, puDst, puSrc);
1976
1977 IEM_MC_ADVANCE_RIP();
1978 IEM_MC_END();
1979 }
1980 return VINF_SUCCESS;
1981}
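
/*
 * Illustrative sketch, not the emulator's implementation (the real work is
 * done by iemAImpl_movshdup): what the instruction computes per the @optest
 * above.  Each odd-numbered source dword is duplicated into the even slot
 * below it.  The function name is hypothetical.
 */
static void iemSketchMovShDup(uint32_t au32Dst[4], uint32_t const au32Src[4])
{
    au32Dst[0] = au32Src[1];
    au32Dst[1] = au32Src[1];
    au32Dst[2] = au32Src[3];
    au32Dst[3] = au32Src[3];
}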
1982
1983/**
1984 * @opdone
1985 * @opmnemonic udf20f16
1986 * @opcode 0x16
1987 * @oppfx 0xf2
1988 * @opunused intel-modrm
1989 * @opcpuid sse
1990 * @optest ->
1991 * @opdone
1992 */
1993
1994
1995/**
1996 * @opcode 0x17
1997 * @opcodesub !11 mr/reg
1998 * @oppfx none
1999 * @opcpuid sse
2000 * @opgroup og_sse_simdfp_datamove
2001 * @opxcpttype 5
2002 * @optest op1=1 op2=2 -> op1=2
2003 * @optest op1=0 op2=-42 -> op1=-42
2004 */
2005FNIEMOP_DEF(iemOp_movhps_Mq_Vq)
2006{
2007 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2008 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2009 {
2010 IEMOP_MNEMONIC2(MR_MEM, MOVHPS, movhps, Mq_WO, VqHi, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2011
2012 IEM_MC_BEGIN(0, 2);
2013 IEM_MC_LOCAL(uint64_t, uSrc);
2014 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2015
2016 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2017 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2018 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2019 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2020
2021 IEM_MC_FETCH_XREG_HI_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2022 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2023
2024 IEM_MC_ADVANCE_RIP();
2025 IEM_MC_END();
2026 return VINF_SUCCESS;
2027 }
2028
2029 /**
2030 * @opdone
2031 * @opmnemonic ud0f17m3
2032 * @opcode 0x17
2033 * @opcodesub 11 mr/reg
2034 * @oppfx none
2035 * @opunused immediate
2036 * @opcpuid sse
2037 * @optest ->
2038 */
2039 return IEMOP_RAISE_INVALID_OPCODE();
2040}
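
/*
 * Illustrative sketch, hypothetical helper and type: what the
 * IEM_MC_FETCH_XREG_HI_U64 operation above denotes - picking bits 127:64 out
 * of a 128-bit register value modelled as two qwords.
 */
typedef struct IEMSKETCHXMM { uint64_t au64[2]; } IEMSKETCHXMM;
static uint64_t iemSketchFetchHiQword(IEMSKETCHXMM const *pXmm)
{
    return pXmm->au64[1]; /* the half that movhps/movhpd store to memory */
}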
2041
2042
2043/**
2044 * @opcode 0x17
2045 * @opcodesub !11 mr/reg
2046 * @oppfx 0x66
2047 * @opcpuid sse2
2048 * @opgroup og_sse2_pcksclr_datamove
2049 * @opxcpttype 5
2050 * @optest op1=1 op2=2 -> op1=2
2051 * @optest op1=0 op2=-42 -> op1=-42
2052 */
2053FNIEMOP_DEF(iemOp_movhpd_Mq_Vq)
2054{
2055 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2056 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2057 {
2058 IEMOP_MNEMONIC2(MR_MEM, MOVHPD, movhpd, Mq_WO, VqHi, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2059
2060 IEM_MC_BEGIN(0, 2);
2061 IEM_MC_LOCAL(uint64_t, uSrc);
2062 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2063
2064 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2065 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2066 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2067 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2068
2069 IEM_MC_FETCH_XREG_HI_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2070 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2071
2072 IEM_MC_ADVANCE_RIP();
2073 IEM_MC_END();
2074 return VINF_SUCCESS;
2075 }
2076
2077 /**
2078 * @opdone
2079 * @opmnemonic ud660f17m3
2080 * @opcode 0x17
2081 * @opcodesub 11 mr/reg
2082 * @oppfx 0x66
2083 * @opunused immediate
2084 * @opcpuid sse
2085 * @optest ->
2086 */
2087 return IEMOP_RAISE_INVALID_OPCODE();
2088}
2089
2090
2091/**
2092 * @opdone
2093 * @opmnemonic udf30f17
2094 * @opcode 0x17
2095 * @oppfx 0xf3
2096 * @opunused intel-modrm
2097 * @opcpuid sse
2098 * @optest ->
2099 * @opdone
2100 */
2101
2102/**
2103 * @opmnemonic udf20f17
2104 * @opcode 0x17
2105 * @oppfx 0xf2
2106 * @opunused intel-modrm
2107 * @opcpuid sse
2108 * @optest ->
2109 * @opdone
2110 */
2111
2112
2113/** Opcode 0x0f 0x18. */
2114FNIEMOP_DEF(iemOp_prefetch_Grp16)
2115{
2116 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2117 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2118 {
2119 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
2120 {
2121 case 4: /* Aliased to /0 for the time being according to AMD. */
2122 case 5: /* Aliased to /0 for the time being according to AMD. */
2123 case 6: /* Aliased to /0 for the time being according to AMD. */
2124 case 7: /* Aliased to /0 for the time being according to AMD. */
2125 case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
2126 case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0 m8"); break;
2127 case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1 m8"); break;
2128 case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2 m8"); break;
2129 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2130 }
2131
2132 IEM_MC_BEGIN(0, 1);
2133 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2134 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2135 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2136 /* Currently a NOP. */
2137 NOREF(GCPtrEffSrc);
2138 IEM_MC_ADVANCE_RIP();
2139 IEM_MC_END();
2140 return VINF_SUCCESS;
2141 }
2142
2143 return IEMOP_RAISE_INVALID_OPCODE();
2144}
2145
2146
2147/** Opcode 0x0f 0x19..0x1f. */
2148FNIEMOP_DEF(iemOp_nop_Ev)
2149{
2150 IEMOP_MNEMONIC(nop_Ev, "nop Ev");
2151 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2152 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2153 {
2154 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2155 IEM_MC_BEGIN(0, 0);
2156 IEM_MC_ADVANCE_RIP();
2157 IEM_MC_END();
2158 }
2159 else
2160 {
2161 IEM_MC_BEGIN(0, 1);
2162 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2163 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2164 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2165 /* Currently a NOP. */
2166 NOREF(GCPtrEffSrc);
2167 IEM_MC_ADVANCE_RIP();
2168 IEM_MC_END();
2169 }
2170 return VINF_SUCCESS;
2171}
2172
2173
2174/** Opcode 0x0f 0x20. */
2175FNIEMOP_DEF(iemOp_mov_Rd_Cd)
2176{
2177 /* mod is ignored, as are operand size overrides. */
2178 IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
2179 IEMOP_HLP_MIN_386();
2180 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
2181 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
2182 else
2183 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
2184
2185 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2186 uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
2187 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
2188 {
2189 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
2190 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
2191 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
2192 iCrReg |= 8;
2193 }
2194 switch (iCrReg)
2195 {
2196 case 0: case 2: case 3: case 4: case 8:
2197 break;
2198 default:
2199 return IEMOP_RAISE_INVALID_OPCODE();
2200 }
2201 IEMOP_HLP_DONE_DECODING();
2202
2203 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB, iCrReg);
2204}
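
/*
 * Illustrative sketch, not the decoder's code: how the control register
 * index above is assembled.  ModR/M.reg plus REX.R gives 0..15; on CPUs with
 * the AMD alternative encoding a LOCK prefix adds 8 so 32-bit code can reach
 * CR8.  The function name and the fRexR/fLock stand-ins are hypothetical.
 */
static uint8_t iemSketchCalcCrReg(uint8_t bRm, bool fRexR, bool fLock)
{
    uint8_t iCrReg = (uint8_t)(((bRm >> 3) & 7) | (fRexR ? 8 : 0));
    if (fLock)
        iCrReg |= 8; /* LOCK turns a CR0 access into a CR8 access on such CPUs. */
    return iCrReg;   /* The caller still rejects everything but 0, 2, 3, 4 and 8. */
}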
2205
2206
2207/** Opcode 0x0f 0x21. */
2208FNIEMOP_DEF(iemOp_mov_Rd_Dd)
2209{
2210 IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
2211 IEMOP_HLP_MIN_386();
2212 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2213 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2214 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
2215 return IEMOP_RAISE_INVALID_OPCODE();
2216 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
2217 (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB,
2218 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK));
2219}
2220
2221
2222/** Opcode 0x0f 0x22. */
2223FNIEMOP_DEF(iemOp_mov_Cd_Rd)
2224{
2225 /* mod is ignored, as are operand size overrides. */
2226 IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
2227 IEMOP_HLP_MIN_386();
2228 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
2229 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
2230 else
2231 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
2232
2233 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2234 uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
2235 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
2236 {
2237 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
2238 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
2239 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
2240 iCrReg |= 8;
2241 }
2242 switch (iCrReg)
2243 {
2244 case 0: case 2: case 3: case 4: case 8:
2245 break;
2246 default:
2247 return IEMOP_RAISE_INVALID_OPCODE();
2248 }
2249 IEMOP_HLP_DONE_DECODING();
2250
2251 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
2252}
2253
2254
2255/** Opcode 0x0f 0x23. */
2256FNIEMOP_DEF(iemOp_mov_Dd_Rd)
2257{
2258 IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
2259 IEMOP_HLP_MIN_386();
2260 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2261 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2262 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
2263 return IEMOP_RAISE_INVALID_OPCODE();
2264 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
2265 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK),
2266 (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
2267}
2268
2269
2270/** Opcode 0x0f 0x24. */
2271FNIEMOP_DEF(iemOp_mov_Rd_Td)
2272{
2273 IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
2274 /** @todo works on 386 and 486. */
2275 /* The RM byte is not considered, see testcase. */
2276 return IEMOP_RAISE_INVALID_OPCODE();
2277}
2278
2279
2280/** Opcode 0x0f 0x26. */
2281FNIEMOP_DEF(iemOp_mov_Td_Rd)
2282{
2283 IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
2284 /** @todo works on 386 and 486. */
2285 /* The RM byte is not considered, see testcase. */
2286 return IEMOP_RAISE_INVALID_OPCODE();
2287}
2288
2289
2290/**
2291 * @opcode 0x28
2292 * @oppfx none
2293 * @opcpuid sse
2294 * @opgroup og_sse_simdfp_datamove
2295 * @opxcpttype 1
2296 * @optest op1=1 op2=2 -> op1=2
2297 * @optest op1=0 op2=-42 -> op1=-42
2298 */
2299FNIEMOP_DEF(iemOp_movaps_Vps_Wps)
2300{
2301 IEMOP_MNEMONIC2(RM, MOVAPS, movaps, Vps_WO, Wps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2302 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2303 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2304 {
2305 /*
2306 * Register, register.
2307 */
2308 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2309 IEM_MC_BEGIN(0, 0);
2310 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2311 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2312 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
2313 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2314 IEM_MC_ADVANCE_RIP();
2315 IEM_MC_END();
2316 }
2317 else
2318 {
2319 /*
2320 * Register, memory.
2321 */
2322 IEM_MC_BEGIN(0, 2);
2323 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2324 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2325
2326 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2327 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2328 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2329 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2330
2331 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2332 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
2333
2334 IEM_MC_ADVANCE_RIP();
2335 IEM_MC_END();
2336 }
2337 return VINF_SUCCESS;
2338}
2339
2340/**
2341 * @opcode 0x28
2342 * @oppfx 0x66
2343 * @opcpuid sse2
2344 * @opgroup og_sse2_pcksclr_datamove
2345 * @opxcpttype 1
2346 * @optest op1=1 op2=2 -> op1=2
2347 * @optest op1=0 op2=-42 -> op1=-42
2348 */
2349FNIEMOP_DEF(iemOp_movapd_Vpd_Wpd)
2350{
2351 IEMOP_MNEMONIC2(RM, MOVAPD, movapd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2352 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2353 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2354 {
2355 /*
2356 * Register, register.
2357 */
2358 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2359 IEM_MC_BEGIN(0, 0);
2360 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2361 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2362 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
2363 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2364 IEM_MC_ADVANCE_RIP();
2365 IEM_MC_END();
2366 }
2367 else
2368 {
2369 /*
2370 * Register, memory.
2371 */
2372 IEM_MC_BEGIN(0, 2);
2373 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2374 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2375
2376 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2377 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2378 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2379 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2380
2381 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2382 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
2383
2384 IEM_MC_ADVANCE_RIP();
2385 IEM_MC_END();
2386 }
2387 return VINF_SUCCESS;
2388}
2389
2390/* Opcode 0xf3 0x0f 0x28 - invalid */
2391/* Opcode 0xf2 0x0f 0x28 - invalid */
2392
2393/**
2394 * @opcode 0x29
2395 * @oppfx none
2396 * @opcpuid sse
2397 * @opgroup og_sse_simdfp_datamove
2398 * @opxcpttype 1
2399 * @optest op1=1 op2=2 -> op1=2
2400 * @optest op1=0 op2=-42 -> op1=-42
2401 */
2402FNIEMOP_DEF(iemOp_movaps_Wps_Vps)
2403{
2404 IEMOP_MNEMONIC2(MR, MOVAPS, movaps, Wps_WO, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2405 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2406 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2407 {
2408 /*
2409 * Register, register.
2410 */
2411 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2412 IEM_MC_BEGIN(0, 0);
2413 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2414 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2415 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
2416 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2417 IEM_MC_ADVANCE_RIP();
2418 IEM_MC_END();
2419 }
2420 else
2421 {
2422 /*
2423 * Memory, register.
2424 */
2425 IEM_MC_BEGIN(0, 2);
2426 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2427 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2428
2429 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2430 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2431 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2432 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2433
2434 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2435 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2436
2437 IEM_MC_ADVANCE_RIP();
2438 IEM_MC_END();
2439 }
2440 return VINF_SUCCESS;
2441}
2442
2443/**
2444 * @opcode 0x29
2445 * @oppfx 0x66
2446 * @opcpuid sse2
2447 * @opgroup og_sse2_pcksclr_datamove
2448 * @opxcpttype 1
2449 * @optest op1=1 op2=2 -> op1=2
2450 * @optest op1=0 op2=-42 -> op1=-42
2451 */
2452FNIEMOP_DEF(iemOp_movapd_Wpd_Vpd)
2453{
2454 IEMOP_MNEMONIC2(MR, MOVAPD, movapd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2455 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2456 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2457 {
2458 /*
2459 * Register, register.
2460 */
2461 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2462 IEM_MC_BEGIN(0, 0);
2463 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2464 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2465 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
2466 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2467 IEM_MC_ADVANCE_RIP();
2468 IEM_MC_END();
2469 }
2470 else
2471 {
2472 /*
2473 * Memory, register.
2474 */
2475 IEM_MC_BEGIN(0, 2);
2476 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2477 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2478
2479 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2480 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2481 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2482 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2483
2484 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2485 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2486
2487 IEM_MC_ADVANCE_RIP();
2488 IEM_MC_END();
2489 }
2490 return VINF_SUCCESS;
2491}
2492
2493/* Opcode 0xf3 0x0f 0x29 - invalid */
2494/* Opcode 0xf2 0x0f 0x29 - invalid */
2495
2496
2497/** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */
2498FNIEMOP_STUB(iemOp_cvtpi2ps_Vps_Qpi); //NEXT
2499/** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi */
2500FNIEMOP_STUB(iemOp_cvtpi2pd_Vpd_Qpi); //NEXT
2501/** Opcode 0xf3 0x0f 0x2a - cvtsi2ss Vss, Ey */
2502FNIEMOP_STUB(iemOp_cvtsi2ss_Vss_Ey); //NEXT
2503/** Opcode 0xf2 0x0f 0x2a - cvtsi2sd Vsd, Ey */
2504FNIEMOP_STUB(iemOp_cvtsi2sd_Vsd_Ey); //NEXT
2505
2506
2507/**
2508 * @opcode 0x2b
2509 * @opcodesub !11 mr/reg
2510 * @oppfx none
2511 * @opcpuid sse
2512 * @opgroup og_sse1_cachect
2513 * @opxcpttype 1
2514 * @optest op1=1 op2=2 -> op1=2
2515 * @optest op1=0 op2=-42 -> op1=-42
2516 */
2517FNIEMOP_DEF(iemOp_movntps_Mps_Vps)
2518{
2519 IEMOP_MNEMONIC2(MR_MEM, MOVNTPS, movntps, Mps_WO, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2520 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2521 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2522 {
2523 /*
2524 * Memory, register.
2525 */
2526 IEM_MC_BEGIN(0, 2);
2527 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2528 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2529
2530 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2531 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2532 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2533 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2534
2535 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2536 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2537
2538 IEM_MC_ADVANCE_RIP();
2539 IEM_MC_END();
2540 }
2541 /* The register, register encoding is invalid. */
2542 else
2543 return IEMOP_RAISE_INVALID_OPCODE();
2544 return VINF_SUCCESS;
2545}
2546
2547/**
2548 * @opcode 0x2b
2549 * @opcodesub !11 mr/reg
2550 * @oppfx 0x66
2551 * @opcpuid sse2
2552 * @opgroup og_sse2_cachect
2553 * @opxcpttype 1
2554 * @optest op1=1 op2=2 -> op1=2
2555 * @optest op1=0 op2=-42 -> op1=-42
2556 */
2557FNIEMOP_DEF(iemOp_movntpd_Mpd_Vpd)
2558{
2559 IEMOP_MNEMONIC2(MR_MEM, MOVNTPD, movntpd, Mpd_WO, Vpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2560 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2561 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2562 {
2563 /*
2564 * Memory, register.
2565 */
2566 IEM_MC_BEGIN(0, 2);
2567 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2568 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2569
2570 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2571 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2572 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2573 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2574
2575 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2576 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2577
2578 IEM_MC_ADVANCE_RIP();
2579 IEM_MC_END();
2580 }
2581 /* The register, register encoding is invalid. */
2582 else
2583 return IEMOP_RAISE_INVALID_OPCODE();
2584 return VINF_SUCCESS;
2585}
2586/* Opcode 0xf3 0x0f 0x2b - invalid */
2587/* Opcode 0xf2 0x0f 0x2b - invalid */
2588
2589
2590/** Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps */
2591FNIEMOP_STUB(iemOp_cvttps2pi_Ppi_Wps);
2592/** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd */
2593FNIEMOP_STUB(iemOp_cvttpd2pi_Ppi_Wpd);
2594/** Opcode 0xf3 0x0f 0x2c - cvttss2si Gy, Wss */
2595FNIEMOP_STUB(iemOp_cvttss2si_Gy_Wss);
2596/** Opcode 0xf2 0x0f 0x2c - cvttsd2si Gy, Wsd */
2597FNIEMOP_STUB(iemOp_cvttsd2si_Gy_Wsd);
2598
2599/** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */
2600FNIEMOP_STUB(iemOp_cvtps2pi_Ppi_Wps);
2601/** Opcode 0x66 0x0f 0x2d - cvtpd2pi Qpi, Wpd */
2602FNIEMOP_STUB(iemOp_cvtpd2pi_Qpi_Wpd);
2603/** Opcode 0xf3 0x0f 0x2d - cvtss2si Gy, Wss */
2604FNIEMOP_STUB(iemOp_cvtss2si_Gy_Wss);
2605/** Opcode 0xf2 0x0f 0x2d - cvtsd2si Gy, Wsd */
2606FNIEMOP_STUB(iemOp_cvtsd2si_Gy_Wsd);
2607
2608/** Opcode 0x0f 0x2e - ucomiss Vss, Wss */
2609FNIEMOP_STUB(iemOp_ucomiss_Vss_Wss); // NEXT
2610/** Opcode 0x66 0x0f 0x2e - ucomisd Vsd, Wsd */
2611FNIEMOP_STUB(iemOp_ucomisd_Vsd_Wsd); // NEXT
2612/* Opcode 0xf3 0x0f 0x2e - invalid */
2613/* Opcode 0xf2 0x0f 0x2e - invalid */
2614
2615/** Opcode 0x0f 0x2f - comiss Vss, Wss */
2616FNIEMOP_STUB(iemOp_comiss_Vss_Wss);
2617/** Opcode 0x66 0x0f 0x2f - comisd Vsd, Wsd */
2618FNIEMOP_STUB(iemOp_comisd_Vsd_Wsd);
2619/* Opcode 0xf3 0x0f 0x2f - invalid */
2620/* Opcode 0xf2 0x0f 0x2f - invalid */
2621
2622/** Opcode 0x0f 0x30. */
2623FNIEMOP_DEF(iemOp_wrmsr)
2624{
2625 IEMOP_MNEMONIC(wrmsr, "wrmsr");
2626 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2627 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
2628}
2629
2630
2631/** Opcode 0x0f 0x31. */
2632FNIEMOP_DEF(iemOp_rdtsc)
2633{
2634 IEMOP_MNEMONIC(rdtsc, "rdtsc");
2635 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2636 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
2637}
2638
2639
2640/** Opcode 0x0f 0x32. */
2641FNIEMOP_DEF(iemOp_rdmsr)
2642{
2643 IEMOP_MNEMONIC(rdmsr, "rdmsr");
2644 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2645 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
2646}
2647
2648
2649/** Opcode 0x0f 0x33. */
2650FNIEMOP_DEF(iemOp_rdpmc)
2651{
2652 IEMOP_MNEMONIC(rdpmc, "rdpmc");
2653 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2654 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdpmc);
2655}
2656
2657
2658/** Opcode 0x0f 0x34. */
2659FNIEMOP_STUB(iemOp_sysenter);
2660/** Opcode 0x0f 0x35. */
2661FNIEMOP_STUB(iemOp_sysexit);
2662/** Opcode 0x0f 0x37. */
2663FNIEMOP_STUB(iemOp_getsec);
2664
2665
2666/** Opcode 0x0f 0x38. */
2667FNIEMOP_DEF(iemOp_3byte_Esc_0f_38)
2668{
2669#ifdef IEM_WITH_THREE_0F_38
2670 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2671 return FNIEMOP_CALL(g_apfnThreeByte0f38[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
2672#else
2673 IEMOP_BITCH_ABOUT_STUB();
2674 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
2675#endif
2676}
2677
2678
2679/** Opcode 0x0f 0x3a. */
2680FNIEMOP_DEF(iemOp_3byte_Esc_0f_3a)
2681{
2682#ifdef IEM_WITH_THREE_0F_3A
2683 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2684 return FNIEMOP_CALL(g_apfnThreeByte0f3a[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
2685#else
2686 IEMOP_BITCH_ABOUT_STUB();
2687 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
2688#endif
2689}
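
/*
 * Illustrative note on the dispatch above, inferred from the indexing rather
 * than stated in this file: the three-byte tables appear to hold four
 * entries per opcode byte, one per mandatory-prefix state, presumably in the
 * order none, 0x66, 0xf3, 0xf2.  The helper name is hypothetical.
 */
static size_t iemSketchThreeByteIndex(uint8_t bOpcode, unsigned idxPrefix)
{
    return (size_t)bOpcode * 4 + idxPrefix; /* idxPrefix in 0..3 */
}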
2690
2691
2692/**
2693 * Implements a conditional move.
2694 *
2695 * Wish there was an obvious way to do this where we could share and reduce
2696 * code bloat.
2697 *
2698 * @param a_Cnd The conditional "microcode" operation.
2699 */
2700#define CMOV_X(a_Cnd) \
2701 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
2702 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) \
2703 { \
2704 switch (pVCpu->iem.s.enmEffOpSize) \
2705 { \
2706 case IEMMODE_16BIT: \
2707 IEM_MC_BEGIN(0, 1); \
2708 IEM_MC_LOCAL(uint16_t, u16Tmp); \
2709 a_Cnd { \
2710 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2711 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
2712 } IEM_MC_ENDIF(); \
2713 IEM_MC_ADVANCE_RIP(); \
2714 IEM_MC_END(); \
2715 return VINF_SUCCESS; \
2716 \
2717 case IEMMODE_32BIT: \
2718 IEM_MC_BEGIN(0, 1); \
2719 IEM_MC_LOCAL(uint32_t, u32Tmp); \
2720 a_Cnd { \
2721 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2722 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
2723 } IEM_MC_ELSE() { \
2724 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
2725 } IEM_MC_ENDIF(); \
2726 IEM_MC_ADVANCE_RIP(); \
2727 IEM_MC_END(); \
2728 return VINF_SUCCESS; \
2729 \
2730 case IEMMODE_64BIT: \
2731 IEM_MC_BEGIN(0, 1); \
2732 IEM_MC_LOCAL(uint64_t, u64Tmp); \
2733 a_Cnd { \
2734 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2735 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
2736 } IEM_MC_ENDIF(); \
2737 IEM_MC_ADVANCE_RIP(); \
2738 IEM_MC_END(); \
2739 return VINF_SUCCESS; \
2740 \
2741 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2742 } \
2743 } \
2744 else \
2745 { \
2746 switch (pVCpu->iem.s.enmEffOpSize) \
2747 { \
2748 case IEMMODE_16BIT: \
2749 IEM_MC_BEGIN(0, 2); \
2750 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2751 IEM_MC_LOCAL(uint16_t, u16Tmp); \
2752 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2753 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2754 a_Cnd { \
2755 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
2756 } IEM_MC_ENDIF(); \
2757 IEM_MC_ADVANCE_RIP(); \
2758 IEM_MC_END(); \
2759 return VINF_SUCCESS; \
2760 \
2761 case IEMMODE_32BIT: \
2762 IEM_MC_BEGIN(0, 2); \
2763 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2764 IEM_MC_LOCAL(uint32_t, u32Tmp); \
2765 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2766 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2767 a_Cnd { \
2768 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
2769 } IEM_MC_ELSE() { \
2770 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
2771 } IEM_MC_ENDIF(); \
2772 IEM_MC_ADVANCE_RIP(); \
2773 IEM_MC_END(); \
2774 return VINF_SUCCESS; \
2775 \
2776 case IEMMODE_64BIT: \
2777 IEM_MC_BEGIN(0, 2); \
2778 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2779 IEM_MC_LOCAL(uint64_t, u64Tmp); \
2780 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2781 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2782 a_Cnd { \
2783 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
2784 } IEM_MC_ENDIF(); \
2785 IEM_MC_ADVANCE_RIP(); \
2786 IEM_MC_END(); \
2787 return VINF_SUCCESS; \
2788 \
2789 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2790 } \
2791 } do {} while (0)
2792
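/*
 * Illustrative model of what CMOV_X expands to for a 32-bit operand in
 * 64-bit mode, not used by the macro itself.  Note the quirk the
 * IEM_MC_ELSE() branches above implement: the destination's high 32 bits
 * are cleared even when the condition is false and no move takes place.
 * The function name is hypothetical.
 */
static uint64_t iemSketchCmov32(uint64_t uDst64, uint32_t uSrc32, bool fCondition)
{
    return fCondition ? uSrc32 : (uint32_t)uDst64; /* zero-extended either way */
}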
2793
2794
2795/** Opcode 0x0f 0x40. */
2796FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
2797{
2798 IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
2799 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
2800}
2801
2802
2803/** Opcode 0x0f 0x41. */
2804FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
2805{
2806 IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
2807 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
2808}
2809
2810
2811/** Opcode 0x0f 0x42. */
2812FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
2813{
2814 IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
2815 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
2816}
2817
2818
2819/** Opcode 0x0f 0x43. */
2820FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
2821{
2822 IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
2823 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
2824}
2825
2826
2827/** Opcode 0x0f 0x44. */
2828FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
2829{
2830 IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
2831 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
2832}
2833
2834
2835/** Opcode 0x0f 0x45. */
2836FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
2837{
2838 IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
2839 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
2840}
2841
2842
2843/** Opcode 0x0f 0x46. */
2844FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
2845{
2846 IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
2847 CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
2848}
2849
2850
2851/** Opcode 0x0f 0x47. */
2852FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
2853{
2854 IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
2855 CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
2856}
2857
2858
2859/** Opcode 0x0f 0x48. */
2860FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
2861{
2862 IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
2863 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
2864}
2865
2866
2867/** Opcode 0x0f 0x49. */
2868FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
2869{
2870 IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
2871 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
2872}
2873
2874
2875/** Opcode 0x0f 0x4a. */
2876FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
2877{
2878 IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
2879 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
2880}
2881
2882
2883/** Opcode 0x0f 0x4b. */
2884FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
2885{
2886 IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
2887 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
2888}
2889
2890
2891/** Opcode 0x0f 0x4c. */
2892FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
2893{
2894 IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
2895 CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
2896}
2897
2898
2899/** Opcode 0x0f 0x4d. */
2900FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
2901{
2902 IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
2903 CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
2904}
2905
2906
2907/** Opcode 0x0f 0x4e. */
2908FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
2909{
2910 IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
2911 CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
2912}
2913
2914
2915/** Opcode 0x0f 0x4f. */
2916FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
2917{
2918 IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
2919 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
2920}
2921
2922#undef CMOV_X
2923
2924/** Opcode 0x0f 0x50 - movmskps Gy, Ups */
2925FNIEMOP_STUB(iemOp_movmskps_Gy_Ups);
2926/** Opcode 0x66 0x0f 0x50 - movmskpd Gy, Upd */
2927FNIEMOP_STUB(iemOp_movmskpd_Gy_Upd);
2928/* Opcode 0xf3 0x0f 0x50 - invalid */
2929/* Opcode 0xf2 0x0f 0x50 - invalid */
2930
2931/** Opcode 0x0f 0x51 - sqrtps Vps, Wps */
2932FNIEMOP_STUB(iemOp_sqrtps_Vps_Wps);
2933/** Opcode 0x66 0x0f 0x51 - sqrtpd Vpd, Wpd */
2934FNIEMOP_STUB(iemOp_sqrtpd_Vpd_Wpd);
2935/** Opcode 0xf3 0x0f 0x51 - sqrtss Vss, Wss */
2936FNIEMOP_STUB(iemOp_sqrtss_Vss_Wss);
2937/** Opcode 0xf2 0x0f 0x51 - sqrtsd Vsd, Wsd */
2938FNIEMOP_STUB(iemOp_sqrtsd_Vsd_Wsd);
2939
2940/** Opcode 0x0f 0x52 - rsqrtps Vps, Wps */
2941FNIEMOP_STUB(iemOp_rsqrtps_Vps_Wps);
2942/* Opcode 0x66 0x0f 0x52 - invalid */
2943/** Opcode 0xf3 0x0f 0x52 - rsqrtss Vss, Wss */
2944FNIEMOP_STUB(iemOp_rsqrtss_Vss_Wss);
2945/* Opcode 0xf2 0x0f 0x52 - invalid */
2946
2947/** Opcode 0x0f 0x53 - rcpps Vps, Wps */
2948FNIEMOP_STUB(iemOp_rcpps_Vps_Wps);
2949/* Opcode 0x66 0x0f 0x53 - invalid */
2950/** Opcode 0xf3 0x0f 0x53 - rcpss Vss, Wss */
2951FNIEMOP_STUB(iemOp_rcpss_Vss_Wss);
2952/* Opcode 0xf2 0x0f 0x53 - invalid */
2953
2954/** Opcode 0x0f 0x54 - andps Vps, Wps */
2955FNIEMOP_STUB(iemOp_andps_Vps_Wps);
2956/** Opcode 0x66 0x0f 0x54 - andpd Vpd, Wpd */
2957FNIEMOP_STUB(iemOp_andpd_Vpd_Wpd);
2958/* Opcode 0xf3 0x0f 0x54 - invalid */
2959/* Opcode 0xf2 0x0f 0x54 - invalid */
2960
2961/** Opcode 0x0f 0x55 - andnps Vps, Wps */
2962FNIEMOP_STUB(iemOp_andnps_Vps_Wps);
2963/** Opcode 0x66 0x0f 0x55 - andnpd Vpd, Wpd */
2964FNIEMOP_STUB(iemOp_andnpd_Vpd_Wpd);
2965/* Opcode 0xf3 0x0f 0x55 - invalid */
2966/* Opcode 0xf2 0x0f 0x55 - invalid */
2967
2968/** Opcode 0x0f 0x56 - orps Vps, Wps */
2969FNIEMOP_STUB(iemOp_orps_Vps_Wps);
2970/** Opcode 0x66 0x0f 0x56 - orpd Vpd, Wpd */
2971FNIEMOP_STUB(iemOp_orpd_Vpd_Wpd);
2972/* Opcode 0xf3 0x0f 0x56 - invalid */
2973/* Opcode 0xf2 0x0f 0x56 - invalid */
2974
2975/** Opcode 0x0f 0x57 - xorps Vps, Wps */
2976FNIEMOP_STUB(iemOp_xorps_Vps_Wps);
2977/** Opcode 0x66 0x0f 0x57 - xorpd Vpd, Wpd */
2978FNIEMOP_STUB(iemOp_xorpd_Vpd_Wpd);
2979/* Opcode 0xf3 0x0f 0x57 - invalid */
2980/* Opcode 0xf2 0x0f 0x57 - invalid */
2981
2982/** Opcode 0x0f 0x58 - addps Vps, Wps */
2983FNIEMOP_STUB(iemOp_addps_Vps_Wps);
2984/** Opcode 0x66 0x0f 0x58 - addpd Vpd, Wpd */
2985FNIEMOP_STUB(iemOp_addpd_Vpd_Wpd);
2986/** Opcode 0xf3 0x0f 0x58 - addss Vss, Wss */
2987FNIEMOP_STUB(iemOp_addss_Vss_Wss);
2988/** Opcode 0xf2 0x0f 0x58 - addsd Vsd, Wsd */
2989FNIEMOP_STUB(iemOp_addsd_Vsd_Wsd);
2990
2991/** Opcode 0x0f 0x59 - mulps Vps, Wps */
2992FNIEMOP_STUB(iemOp_mulps_Vps_Wps);
2993/** Opcode 0x66 0x0f 0x59 - mulpd Vpd, Wpd */
2994FNIEMOP_STUB(iemOp_mulpd_Vpd_Wpd);
2995/** Opcode 0xf3 0x0f 0x59 - mulss Vss, Wss */
2996FNIEMOP_STUB(iemOp_mulss_Vss_Wss);
2997/** Opcode 0xf2 0x0f 0x59 - mulsd Vsd, Wsd */
2998FNIEMOP_STUB(iemOp_mulsd_Vsd_Wsd);
2999
3000/** Opcode 0x0f 0x5a - cvtps2pd Vpd, Wps */
3001FNIEMOP_STUB(iemOp_cvtps2pd_Vpd_Wps);
3002/** Opcode 0x66 0x0f 0x5a - cvtpd2ps Vps, Wpd */
3003FNIEMOP_STUB(iemOp_cvtpd2ps_Vps_Wpd);
3004/** Opcode 0xf3 0x0f 0x5a - cvtss2sd Vsd, Wss */
3005FNIEMOP_STUB(iemOp_cvtss2sd_Vsd_Wss);
3006/** Opcode 0xf2 0x0f 0x5a - cvtsd2ss Vss, Wsd */
3007FNIEMOP_STUB(iemOp_cvtsd2ss_Vss_Wsd);
3008
3009/** Opcode 0x0f 0x5b - cvtdq2ps Vps, Wdq */
3010FNIEMOP_STUB(iemOp_cvtdq2ps_Vps_Wdq);
3011/** Opcode 0x66 0x0f 0x5b - cvtps2dq Vdq, Wps */
3012FNIEMOP_STUB(iemOp_cvtps2dq_Vdq_Wps);
3013/** Opcode 0xf3 0x0f 0x5b - cvttps2dq Vdq, Wps */
3014FNIEMOP_STUB(iemOp_cvttps2dq_Vdq_Wps);
3015/* Opcode 0xf2 0x0f 0x5b - invalid */
3016
3017/** Opcode 0x0f 0x5c - subps Vps, Wps */
3018FNIEMOP_STUB(iemOp_subps_Vps_Wps);
3019/** Opcode 0x66 0x0f 0x5c - subpd Vpd, Wpd */
3020FNIEMOP_STUB(iemOp_subpd_Vpd_Wpd);
3021/** Opcode 0xf3 0x0f 0x5c - subss Vss, Wss */
3022FNIEMOP_STUB(iemOp_subss_Vss_Wss);
3023/** Opcode 0xf2 0x0f 0x5c - subsd Vsd, Wsd */
3024FNIEMOP_STUB(iemOp_subsd_Vsd_Wsd);
3025
3026/** Opcode 0x0f 0x5d - minps Vps, Wps */
3027FNIEMOP_STUB(iemOp_minps_Vps_Wps);
3028/** Opcode 0x66 0x0f 0x5d - minpd Vpd, Wpd */
3029FNIEMOP_STUB(iemOp_minpd_Vpd_Wpd);
3030/** Opcode 0xf3 0x0f 0x5d - minss Vss, Wss */
3031FNIEMOP_STUB(iemOp_minss_Vss_Wss);
3032/** Opcode 0xf2 0x0f 0x5d - minsd Vsd, Wsd */
3033FNIEMOP_STUB(iemOp_minsd_Vsd_Wsd);
3034
3035/** Opcode 0x0f 0x5e - divps Vps, Wps */
3036FNIEMOP_STUB(iemOp_divps_Vps_Wps);
3037/** Opcode 0x66 0x0f 0x5e - divpd Vpd, Wpd */
3038FNIEMOP_STUB(iemOp_divpd_Vpd_Wpd);
3039/** Opcode 0xf3 0x0f 0x5e - divss Vss, Wss */
3040FNIEMOP_STUB(iemOp_divss_Vss_Wss);
3041/** Opcode 0xf2 0x0f 0x5e - divsd Vsd, Wsd */
3042FNIEMOP_STUB(iemOp_divsd_Vsd_Wsd);
3043
3044/** Opcode 0x0f 0x5f - maxps Vps, Wps */
3045FNIEMOP_STUB(iemOp_maxps_Vps_Wps);
3046/** Opcode 0x66 0x0f 0x5f - maxpd Vpd, Wpd */
3047FNIEMOP_STUB(iemOp_maxpd_Vpd_Wpd);
3048/** Opcode 0xf3 0x0f 0x5f - maxss Vss, Wss */
3049FNIEMOP_STUB(iemOp_maxss_Vss_Wss);
3050/** Opcode 0xf2 0x0f 0x5f - maxsd Vsd, Wsd */
3051FNIEMOP_STUB(iemOp_maxsd_Vsd_Wsd);
3052
3053/**
3054 * Common worker for MMX instructions on the forms:
3055 * pxxxx mm1, mm2/mem32
3056 *
3057 * The 2nd operand is the first half of a register, which in the memory case
3058 * means a 32-bit memory access, and in the register case the low 32 bits of
3059 * the source MMX register.
3060 *
3061 * Exceptions type 4.
3062 */
3063FNIEMOP_DEF_1(iemOpCommonMmx_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
3064{
3065 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3066 if (!pImpl->pfnU64)
3067 return IEMOP_RAISE_INVALID_OPCODE();
3068 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3069 {
3070 /*
3071 * Register, register.
3072 */
3073 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3074 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3075 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3076 IEM_MC_BEGIN(2, 0);
3077 IEM_MC_ARG(uint64_t *, pDst, 0);
3078 IEM_MC_ARG(uint32_t const *, pSrc, 1);
3079 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3080 IEM_MC_PREPARE_FPU_USAGE();
3081 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3082 IEM_MC_REF_MREG_U32_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3083 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3084 IEM_MC_ADVANCE_RIP();
3085 IEM_MC_END();
3086 }
3087 else
3088 {
3089 /*
3090 * Register, memory.
3091 */
3092 IEM_MC_BEGIN(2, 2);
3093 IEM_MC_ARG(uint64_t *, pDst, 0);
3094 IEM_MC_LOCAL(uint32_t, uSrc);
3095 IEM_MC_ARG_LOCAL_REF(uint32_t const *, pSrc, uSrc, 1);
3096 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3097
3098 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3099 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3100 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3101 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3102
3103 IEM_MC_PREPARE_FPU_USAGE();
3104 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3105 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3106
3107 IEM_MC_ADVANCE_RIP();
3108 IEM_MC_END();
3109 }
3110 return VINF_SUCCESS;
3111}
3112
3113
3114/**
3115 * Common worker for SSE2 instructions on the forms:
3116 * pxxxx xmm1, xmm2/mem128
3117 *
3118 * The 2nd operand is the first half of a register, which in the memory case
3119 * means a 128-bit aligned, 64-bit memory access, and in the register case the
3120 * low 64 bits of the source XMM register.
3121 *
3122 * Exceptions type 4.
3123 */
3124FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
3125{
3126 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3127 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3128 {
3129 /*
3130 * Register, register.
3131 */
3132 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3133 IEM_MC_BEGIN(2, 0);
3134 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3135 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3136 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3137 IEM_MC_PREPARE_SSE_USAGE();
3138 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3139 IEM_MC_REF_XREG_U64_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3140 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3141 IEM_MC_ADVANCE_RIP();
3142 IEM_MC_END();
3143 }
3144 else
3145 {
3146 /*
3147 * Register, memory.
3148 */
3149 IEM_MC_BEGIN(2, 2);
3150 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3151 IEM_MC_LOCAL(uint64_t, uSrc);
3152 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3153 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3154
3155 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3156 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3157 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3158 IEM_MC_FETCH_MEM_U64_ALIGN_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3159
3160 IEM_MC_PREPARE_SSE_USAGE();
3161 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3162 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3163
3164 IEM_MC_ADVANCE_RIP();
3165 IEM_MC_END();
3166 }
3167 return VINF_SUCCESS;
3168}
3169
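/*
 * Illustrative sketch, not the emulator's implementation (the real work is
 * done by the pImpl worker functions): what the low-low interleave computes
 * for MMX punpcklbw.  The four low bytes of each operand are interleaved,
 * destination bytes first.  The function name is hypothetical.
 */
static uint64_t iemSketchPunpcklbw(uint64_t uDst, uint32_t uSrc)
{
    uint64_t uResult = 0;
    for (unsigned i = 0; i < 4; i++)
    {
        uResult |= ((uDst >> (i * 8)) & 0xff) << (i * 16);               /* dst byte i -> result byte 2*i */
        uResult |= (uint64_t)((uSrc >> (i * 8)) & 0xff) << (i * 16 + 8); /* src byte i -> result byte 2*i + 1 */
    }
    return uResult;
}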
3170
3171/** Opcode 0x0f 0x60 - punpcklbw Pq, Qd */
3172FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
3173{
3174 IEMOP_MNEMONIC(punpcklbw, "punpcklbw Pq, Qd");
3175 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklbw);
3176}
3177
3178/** Opcode 0x66 0x0f 0x60 - punpcklbw Vx, Wx */
3179FNIEMOP_DEF(iemOp_punpcklbw_Vx_Wx)
3180{
3181 IEMOP_MNEMONIC(punpcklbw_Vx_Wx, "punpcklbw Vx, Wx");
3182 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklbw);
3183}
3184
3185/* Opcode 0xf3 0x0f 0x60 - invalid */
3186
3187
3188/** Opcode 0x0f 0x61 - punpcklwd Pq, Qd */
3189FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd)
3190{
3191 IEMOP_MNEMONIC(punpcklwd, "punpcklwd Pq, Qd"); /** @todo AMD marks the MMX version as 3DNow!; Intel says MMX CPUID req. */
3192 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklwd);
3193}
3194
3195/** Opcode 0x66 0x0f 0x61 - punpcklwd Vx, Wx */
3196FNIEMOP_DEF(iemOp_punpcklwd_Vx_Wx)
3197{
3198 IEMOP_MNEMONIC(punpcklwd_Vx_Wx, "punpcklwd Vx, Wx");
3199 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklwd);
3200}
3201
3202/* Opcode 0xf3 0x0f 0x61 - invalid */
3203
3204
3205/** Opcode 0x0f 0x62 - punpckldq Pq, Qd */
3206FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd)
3207{
3208 IEMOP_MNEMONIC(punpckldq, "punpckldq Pq, Qd");
3209 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpckldq);
3210}
3211
3212/** Opcode 0x66 0x0f 0x62 - punpckldq Vx, Wx */
3213FNIEMOP_DEF(iemOp_punpckldq_Vx_Wx)
3214{
3215 IEMOP_MNEMONIC(punpckldq_Vx_Wx, "punpckldq Vx, Wx");
3216 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpckldq);
3217}
3218
3219/* Opcode 0xf3 0x0f 0x62 - invalid */
3220
3221
3222
3223/** Opcode 0x0f 0x63 - packsswb Pq, Qq */
3224FNIEMOP_STUB(iemOp_packsswb_Pq_Qq);
3225/** Opcode 0x66 0x0f 0x63 - packsswb Vx, Wx */
3226FNIEMOP_STUB(iemOp_packsswb_Vx_Wx);
3227/* Opcode 0xf3 0x0f 0x63 - invalid */
3228
3229/** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq */
3230FNIEMOP_STUB(iemOp_pcmpgtb_Pq_Qq);
3231/** Opcode 0x66 0x0f 0x64 - pcmpgtb Vx, Wx */
3232FNIEMOP_STUB(iemOp_pcmpgtb_Vx_Wx);
3233/* Opcode 0xf3 0x0f 0x64 - invalid */
3234
3235/** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq */
3236FNIEMOP_STUB(iemOp_pcmpgtw_Pq_Qq);
3237/** Opcode 0x66 0x0f 0x65 - pcmpgtw Vx, Wx */
3238FNIEMOP_STUB(iemOp_pcmpgtw_Vx_Wx);
3239/* Opcode 0xf3 0x0f 0x65 - invalid */
3240
3241/** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq */
3242FNIEMOP_STUB(iemOp_pcmpgtd_Pq_Qq);
3243/** Opcode 0x66 0x0f 0x66 - pcmpgtd Vx, Wx */
3244FNIEMOP_STUB(iemOp_pcmpgtd_Vx_Wx);
3245/* Opcode 0xf3 0x0f 0x66 - invalid */
3246
3247/** Opcode 0x0f 0x67 - packuswb Pq, Qq */
3248FNIEMOP_STUB(iemOp_packuswb_Pq_Qq);
3249/** Opcode 0x66 0x0f 0x67 - packuswb Vx, W */
3250FNIEMOP_STUB(iemOp_packuswb_Vx_W);
3251/* Opcode 0xf3 0x0f 0x67 - invalid */
3252
3253
3254/**
3255 * Common worker for MMX instructions on the form:
3256 * pxxxx mm1, mm2/mem64
3257 *
3258 * The 2nd operand is the second half of a register, which in the memory case
3259 * means a 64-bit memory access of which only the upper half ends up being
3260 * used.
3261 *
3262 * Exceptions type 4.
3263 */
3264FNIEMOP_DEF_1(iemOpCommonMmx_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
3265{
3266 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3267 AssertReturn(pImpl->pfnU64, IEMOP_RAISE_INVALID_OPCODE());
3268 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3269 {
3270 /*
3271 * Register, register.
3272 */
3273 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3274 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3275 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3276 IEM_MC_BEGIN(2, 0);
3277 IEM_MC_ARG(uint64_t *, pDst, 0);
3278 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3279 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3280 IEM_MC_PREPARE_FPU_USAGE();
3281 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3282 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3283 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3284 IEM_MC_ADVANCE_RIP();
3285 IEM_MC_END();
3286 }
3287 else
3288 {
3289 /*
3290 * Register, memory.
3291 */
3292 IEM_MC_BEGIN(2, 2);
3293 IEM_MC_ARG(uint64_t *, pDst, 0);
3294 IEM_MC_LOCAL(uint64_t, uSrc);
3295 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3296 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3297
3298 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3299 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3300 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3301 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3302
3303 IEM_MC_PREPARE_FPU_USAGE();
3304 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3305 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3306
3307 IEM_MC_ADVANCE_RIP();
3308 IEM_MC_END();
3309 }
3310 return VINF_SUCCESS;
3311}
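
/*
 * Illustrative sketch, not the emulator's implementation: the high-high
 * counterpart of the interleave for MMX punpckhbw.  The four high bytes of
 * each operand are interleaved, destination bytes first, which is why the
 * memory form only needs the upper half of the fetched qword.  The function
 * name is hypothetical.
 */
static uint64_t iemSketchPunpckhbw(uint64_t uDst, uint64_t uSrc)
{
    uint64_t uResult = 0;
    for (unsigned i = 0; i < 4; i++)
    {
        uResult |= ((uDst >> (32 + i * 8)) & 0xff) << (i * 16);
        uResult |= ((uSrc >> (32 + i * 8)) & 0xff) << (i * 16 + 8);
    }
    return uResult;
}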
3312
3313
3314/**
3315 * Common worker for SSE2 instructions on the form:
3316 * pxxxx xmm1, xmm2/mem128
3317 *
3318 * The 2nd operand is the second half of a register, which in the memory case
3319 * means a 128-bit aligned access where the implementation may read the full
3320 * 128 bits or only the upper 64 bits.
3321 *
3322 * Exceptions type 4.
3323 */
3324FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
3325{
3326 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3327 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3328 {
3329 /*
3330 * Register, register.
3331 */
3332 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3333 IEM_MC_BEGIN(2, 0);
3334 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3335 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3336 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3337 IEM_MC_PREPARE_SSE_USAGE();
3338 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3339 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3340 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3341 IEM_MC_ADVANCE_RIP();
3342 IEM_MC_END();
3343 }
3344 else
3345 {
3346 /*
3347 * Register, memory.
3348 */
3349 IEM_MC_BEGIN(2, 2);
3350 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3351 IEM_MC_LOCAL(RTUINT128U, uSrc);
3352 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3353 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3354
3355 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3356 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3357 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3358 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Most CPUs probably only read the high qword. */
3359
3360 IEM_MC_PREPARE_SSE_USAGE();
3361 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3362 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3363
3364 IEM_MC_ADVANCE_RIP();
3365 IEM_MC_END();
3366 }
3367 return VINF_SUCCESS;
3368}
3369
3370
3371/** Opcode 0x0f 0x68 - punpckhbw Pq, Qd */
3372FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qd)
3373{
3374 IEMOP_MNEMONIC(punpckhbw, "punpckhbw Pq, Qd");
3375 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
3376}
3377
3378/** Opcode 0x66 0x0f 0x68 - punpckhbw Vx, Wx */
3379FNIEMOP_DEF(iemOp_punpckhbw_Vx_Wx)
3380{
3381 IEMOP_MNEMONIC(punpckhbw_Vx_Wx, "punpckhbw Vx, Wx");
3382 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
3383}
3384/* Opcode 0xf3 0x0f 0x68 - invalid */
3385
3386
3387/** Opcode 0x0f 0x69 - punpckhwd Pq, Qd */
3388FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qd)
3389{
3390 IEMOP_MNEMONIC(punpckhwd, "punpckhwd Pq, Qd");
3391 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
3392}
3393
3394/** Opcode 0x66 0x0f 0x69 - punpckhwd Vx, Wx */
3395FNIEMOP_DEF(iemOp_punpckhwd_Vx_Wx)
3396{
3397 IEMOP_MNEMONIC(punpckhwd_Vx_Wx, "punpckhwd Vx, Wx");
3398 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
3399}
3400
3401/* Opcode 0xf3 0x0f 0x69 - invalid */
3402
3403
3404/** Opcode 0x0f 0x6a - punpckhdq Pq, Qd */
3405FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qd)
3406{
3407 IEMOP_MNEMONIC(punpckhdq, "punpckhdq Pq, Qd");
3408 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
3409}
3410
3411/** Opcode 0x66 0x0f 0x6a - punpckhdq Vx, W */
3412FNIEMOP_DEF(iemOp_punpckhdq_Vx_W)
3413{
3414 IEMOP_MNEMONIC(punpckhdq_Vx_W, "punpckhdq Vx, W");
3415 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
3416}
3417/* Opcode 0xf3 0x0f 0x6a - invalid */
3418
3419
3420/** Opcode 0x0f 0x6b - packssdw Pq, Qd */
3421FNIEMOP_STUB(iemOp_packssdw_Pq_Qd);
3422/** Opcode 0x66 0x0f 0x6b - packssdw Vx, Wx */
3423FNIEMOP_STUB(iemOp_packssdw_Vx_Wx);
3424/* Opcode 0xf3 0x0f 0x6b - invalid */
3425
3426
3427/* Opcode 0x0f 0x6c - invalid */
3428
3429/** Opcode 0x66 0x0f 0x6c - punpcklqdq Vx, Wx */
3430FNIEMOP_DEF(iemOp_punpcklqdq_Vx_Wx)
3431{
3432 IEMOP_MNEMONIC(punpcklqdq, "punpcklqdq Vx, Wx");
3433 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklqdq);
3434}
3435
3436/* Opcode 0xf3 0x0f 0x6c - invalid */
3437/* Opcode 0xf2 0x0f 0x6c - invalid */
3438
3439
3440/* Opcode 0x0f 0x6d - invalid */
3441
3442/** Opcode 0x66 0x0f 0x6d - punpckhqdq Vx, W */
3443FNIEMOP_DEF(iemOp_punpckhqdq_Vx_W)
3444{
3445 IEMOP_MNEMONIC(punpckhqdq_Vx_W, "punpckhqdq Vx,W");
3446 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhqdq);
3447}
3448
3449/* Opcode 0xf3 0x0f 0x6d - invalid */
3450
3451
3452FNIEMOP_DEF(iemOp_movd_q_Pd_Ey)
3453{
3454 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3455 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3456 {
3457 /**
3458 * @opcode 0x6e
3459 * @opcodesub rex.w=1
3460 * @oppfx none
3461 * @opcpuid mmx
3462 * @opgroup og_mmx_datamove
3463 * @opxcpttype 5
3464 * @optest 64-bit / op1=1 op2=2 -> op1=2 ftw=0xff
3465 * @optest 64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
3466 */
3467 IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Eq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
3468 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3469 {
3470 /* MMX, greg64 */
3471 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3472 IEM_MC_BEGIN(0, 1);
3473 IEM_MC_LOCAL(uint64_t, u64Tmp);
3474
3475 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3476 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3477
3478 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3479 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3480 IEM_MC_FPU_TO_MMX_MODE();
3481
3482 IEM_MC_ADVANCE_RIP();
3483 IEM_MC_END();
3484 }
3485 else
3486 {
3487 /* MMX, [mem64] */
3488 IEM_MC_BEGIN(0, 2);
3489 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3490 IEM_MC_LOCAL(uint64_t, u64Tmp);
3491
3492 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3493 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3494 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3495 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3496
3497 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3498 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3499 IEM_MC_FPU_TO_MMX_MODE();
3500
3501 IEM_MC_ADVANCE_RIP();
3502 IEM_MC_END();
3503 }
3504 }
3505 else
3506 {
3507 /**
3508 * @opdone
3509 * @opcode 0x6e
3510 * @opcodesub rex.w=0
3511 * @oppfx none
3512 * @opcpuid mmx
3513 * @opgroup og_mmx_datamove
3514 * @opxcpttype 5
3515 * @opfunction iemOp_movd_q_Pd_Ey
3516 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
3517 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
3518 */
3519 IEMOP_MNEMONIC2(RM, MOVD, movd, PdZx_WO, Ed, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
3520 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3521 {
3522 /* MMX, greg */
3523 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3524 IEM_MC_BEGIN(0, 1);
3525 IEM_MC_LOCAL(uint64_t, u64Tmp);
3526
3527 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3528 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3529
3530 IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3531 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3532 IEM_MC_FPU_TO_MMX_MODE();
3533
3534 IEM_MC_ADVANCE_RIP();
3535 IEM_MC_END();
3536 }
3537 else
3538 {
3539 /* MMX, [mem] */
3540 IEM_MC_BEGIN(0, 2);
3541 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3542 IEM_MC_LOCAL(uint32_t, u32Tmp);
3543
3544 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3545 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3546 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3547 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3548
3549 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3550 IEM_MC_STORE_MREG_U32_ZX_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u32Tmp);
3551 IEM_MC_FPU_TO_MMX_MODE();
3552
3553 IEM_MC_ADVANCE_RIP();
3554 IEM_MC_END();
3555 }
3556 }
3557 return VINF_SUCCESS;
3558}
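
/*
 * Illustrative sketch, not used above: the REX.W=0 path fetches a dword and
 * zero-extends it into the 64-bit MMX register, which is what the
 * IEM_MC_FETCH_GREG_U32_ZX_U64 / IEM_MC_STORE_MREG_U32_ZX_U64 operations
 * denote.  The function name is hypothetical.
 */
static uint64_t iemSketchMovdToMmx(uint32_t uSrc32)
{
    return uSrc32; /* implicit zero extension fills bits 63:32 */
}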
3559
3560/** Opcode 0x66 0x0f 0x6e - movd/q Vy, Ey */
3561FNIEMOP_DEF(iemOp_movd_q_Vy_Ey)
3562{
3563 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3564 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3565 {
3566 /**
3567 * @opcode 0x6e
3568 * @opcodesub rex.w=1
3569 * @oppfx 0x66
3570 * @opcpuid sse2
3571 * @opgroup og_sse2_simdint_datamov
3572 * @opxcpttype 5
3573 * @optest 64-bit / op1=1 op2=2 -> op1=2
3574 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
3575 */
3576 IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Eq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
3577 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3578 {
3579 /* XMM, greg64 */
3580 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3581 IEM_MC_BEGIN(0, 1);
3582 IEM_MC_LOCAL(uint64_t, u64Tmp);
3583
3584 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3585 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3586
3587 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3588 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
3589
3590 IEM_MC_ADVANCE_RIP();
3591 IEM_MC_END();
3592 }
3593 else
3594 {
3595 /* XMM, [mem64] */
3596 IEM_MC_BEGIN(0, 2);
3597 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3598 IEM_MC_LOCAL(uint64_t, u64Tmp);
3599
3600 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3601 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3602 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3603 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3604
3605 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3606 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
3607
3608 IEM_MC_ADVANCE_RIP();
3609 IEM_MC_END();
3610 }
3611 }
3612 else
3613 {
3614 /**
3615 * @opdone
3616 * @opcode 0x6e
3617 * @opcodesub rex.w=0
3618 * @oppfx 0x66
3619 * @opcpuid sse2
3620 * @opgroup og_sse2_simdint_datamov
3621 * @opxcpttype 5
3622 * @opfunction iemOp_movd_q_Vy_Ey
3623 * @optest op1=1 op2=2 -> op1=2
3624 * @optest op1=0 op2=-42 -> op1=-42
3625 */
3626 IEMOP_MNEMONIC2(RM, MOVD, movd, VdZx_WO, Ed, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
3627 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3628 {
3629 /* XMM, greg32 */
3630 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3631 IEM_MC_BEGIN(0, 1);
3632 IEM_MC_LOCAL(uint32_t, u32Tmp);
3633
3634 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3635 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3636
3637 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3638 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
3639
3640 IEM_MC_ADVANCE_RIP();
3641 IEM_MC_END();
3642 }
3643 else
3644 {
3645 /* XMM, [mem32] */
3646 IEM_MC_BEGIN(0, 2);
3647 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3648 IEM_MC_LOCAL(uint32_t, u32Tmp);
3649
3650 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3651 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3652 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3653 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3654
3655 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3656 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
3657
3658 IEM_MC_ADVANCE_RIP();
3659 IEM_MC_END();
3660 }
3661 }
3662 return VINF_SUCCESS;
3663}
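
/*
 * Illustrative sketch, hypothetical helper using a two-qword model of an
 * XMM register: movd/movq into an XMM register zero-extends all the way
 * through bit 127, per the IEM_MC_STORE_XREG_U32_ZX_U128 /
 * IEM_MC_STORE_XREG_U64_ZX_U128 operations above.
 */
typedef struct IEMSKETCHU128 { uint64_t au64[2]; } IEMSKETCHU128;
static IEMSKETCHU128 iemSketchMovqToXmm(uint64_t uSrc64)
{
    IEMSKETCHU128 uResult;
    uResult.au64[0] = uSrc64; /* low qword receives the source */
    uResult.au64[1] = 0;      /* bits 127:64 are cleared */
    return uResult;
}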
3664
3665/* Opcode 0xf3 0x0f 0x6e - invalid */
3666
3667
3668/**
3669 * @opcode 0x6f
3670 * @oppfx none
3671 * @opcpuid mmx
3672 * @opgroup og_mmx_datamove
3673 * @opxcpttype 5
3674 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
3675 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
3676 * @oponly
3677 */
3678FNIEMOP_DEF(iemOp_movq_Pq_Qq)
3679{
3680 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3681 IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
3682 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3683 {
3684 /*
3685 * Register, register.
3686 */
3687 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3688 IEM_MC_BEGIN(0, 1);
3689 IEM_MC_LOCAL(uint64_t, u64Tmp);
3690
3691 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3692 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3693
3694 IEM_MC_FETCH_MREG_U64(u64Tmp, bRm & X86_MODRM_RM_MASK);
3695 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3696 IEM_MC_FPU_TO_MMX_MODE();
3697
3698 IEM_MC_ADVANCE_RIP();
3699 IEM_MC_END();
3700 }
3701 else
3702 {
3703 /*
3704 * Register, memory.
3705 */
3706 IEM_MC_BEGIN(0, 2);
3707 IEM_MC_LOCAL(uint64_t, u64Tmp);
3708 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3709
3710 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3711 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3712 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3713 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3714
3715 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3716 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
3717 IEM_MC_FPU_TO_MMX_MODE();
3718
3719 IEM_MC_ADVANCE_RIP();
3720 IEM_MC_END();
3721 }
3722 return VINF_SUCCESS;
3723}
3724
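/*
 * Note: IEM_MC_FPU_TO_MMX_MODE models the architectural side effect of
 * executing an MMX instruction on the x87 state: the tag word is set to
 * all-valid and the top-of-stack pointer is cleared, which is what the
 * ftw=0xff checks in the @optest lines above verify.
 */
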
3725/** Opcode 0x66 0x0f 0x6f - movdqa Vx, Wx */
3726FNIEMOP_DEF(iemOp_movdqa_Vx_Wx)
3727{
3728 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3729 IEMOP_MNEMONIC(movdqa_Vdq_Wdq, "movdqa Vdq,Wdq");
3730 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3731 {
3732 /*
3733 * Register, register.
3734 */
3735 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3736 IEM_MC_BEGIN(0, 0);
3737 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3738 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3739 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
3740 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3741 IEM_MC_ADVANCE_RIP();
3742 IEM_MC_END();
3743 }
3744 else
3745 {
3746 /*
3747 * Register, memory.
3748 */
3749 IEM_MC_BEGIN(0, 2);
3750 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3751 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3752
3753 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3754 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3755 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3756 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3757 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3758 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
3759
3760 IEM_MC_ADVANCE_RIP();
3761 IEM_MC_END();
3762 }
3763 return VINF_SUCCESS;
3764}
3765
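/*
 * Note: the aligned fetch above (IEM_MC_FETCH_MEM_U128_ALIGN_SSE) is what
 * separates movdqa from the movdqu variant below: legacy SSE movdqa raises
 * #GP(0) when the effective address is not 16-byte aligned, whereas movdqu
 * uses the plain IEM_MC_FETCH_MEM_U128 and accepts any alignment.
 */
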
3766/** Opcode 0xf3 0x0f 0x6f - movdqu Vx, Wx */
3767FNIEMOP_DEF(iemOp_movdqu_Vx_Wx)
3768{
3769 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3770 IEMOP_MNEMONIC(movdqu_Vdq_Wdq, "movdqu Vdq,Wdq");
3771 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3772 {
3773 /*
3774 * Register, register.
3775 */
3776 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3777 IEM_MC_BEGIN(0, 0);
3778 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3779 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3780 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
3781 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3782 IEM_MC_ADVANCE_RIP();
3783 IEM_MC_END();
3784 }
3785 else
3786 {
3787 /*
3788 * Register, memory.
3789 */
3790 IEM_MC_BEGIN(0, 2);
3791 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3792 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3793
3794 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3795 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3796 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3797 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3798 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3799 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
3800
3801 IEM_MC_ADVANCE_RIP();
3802 IEM_MC_END();
3803 }
3804 return VINF_SUCCESS;
3805}
3806
3807
3808/** Opcode 0x0f 0x70 - pshufw Pq, Qq, Ib */
3809FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib)
3810{
3811 IEMOP_MNEMONIC(pshufw_Pq_Qq, "pshufw Pq,Qq,Ib");
3812 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3813 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3814 {
3815 /*
3816 * Register, register.
3817 */
3818 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3819 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3820
3821 IEM_MC_BEGIN(3, 0);
3822 IEM_MC_ARG(uint64_t *, pDst, 0);
3823 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3824 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3825 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
3826 IEM_MC_PREPARE_FPU_USAGE();
3827 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3828 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3829 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
3830 IEM_MC_ADVANCE_RIP();
3831 IEM_MC_END();
3832 }
3833 else
3834 {
3835 /*
3836 * Register, memory.
3837 */
3838 IEM_MC_BEGIN(3, 2);
3839 IEM_MC_ARG(uint64_t *, pDst, 0);
3840 IEM_MC_LOCAL(uint64_t, uSrc);
3841 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3842 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3843
3844 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1); /* one imm8 byte still to be fetched, needed for RIP-relative addressing */
3845 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3846 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3847 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3848 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
3849
3850 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3851 IEM_MC_PREPARE_FPU_USAGE();
3852 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3853 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
3854
3855 IEM_MC_ADVANCE_RIP();
3856 IEM_MC_END();
3857 }
3858 return VINF_SUCCESS;
3859}
3860
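/*
 * A plain-C sketch of the word shuffle done by the iemAImpl_pshufw assembly
 * helper used above (illustration only, compiled out; the real implementation
 * lives in the A-impl files and the function name below is made up):
 */
#if 0
static uint64_t iemPshufwSketch(uint64_t uSrc, uint8_t bOrder)
{
    uint64_t uDst = 0;
    for (unsigned iWord = 0; iWord < 4; iWord++)
    {
        unsigned const iSel = (bOrder >> (iWord * 2)) & 3; /* 2-bit selector picks the source word */
        uDst |= ((uSrc >> (iSel * 16)) & UINT64_C(0xffff)) << (iWord * 16);
    }
    return uDst;
}
#endif
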
3861/** Opcode 0x66 0x0f 0x70 - pshufd Vx, Wx, Ib */
3862FNIEMOP_DEF(iemOp_pshufd_Vx_Wx_Ib)
3863{
3864 IEMOP_MNEMONIC(pshufd_Vx_Wx_Ib, "pshufd Vx,Wx,Ib");
3865 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3866 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3867 {
3868 /*
3869 * Register, register.
3870 */
3871 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3872 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3873
3874 IEM_MC_BEGIN(3, 0);
3875 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3876 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3877 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3878 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3879 IEM_MC_PREPARE_SSE_USAGE();
3880 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3881 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3882 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
3883 IEM_MC_ADVANCE_RIP();
3884 IEM_MC_END();
3885 }
3886 else
3887 {
3888 /*
3889 * Register, memory.
3890 */
3891 IEM_MC_BEGIN(3, 2);
3892 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3893 IEM_MC_LOCAL(RTUINT128U, uSrc);
3894 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3895 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3896
3897 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1); /* one imm8 byte still to be fetched */
3898 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3899 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3900 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3901 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3902
3903 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3904 IEM_MC_PREPARE_SSE_USAGE();
3905 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3906 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
3907
3908 IEM_MC_ADVANCE_RIP();
3909 IEM_MC_END();
3910 }
3911 return VINF_SUCCESS;
3912}
3913
3914/** Opcode 0xf3 0x0f 0x70 - pshufhw Vx, Wx, Ib */
3915FNIEMOP_DEF(iemOp_pshufhw_Vx_Wx_Ib)
3916{
3917 IEMOP_MNEMONIC(pshufhw_Vx_Wx_Ib, "pshufhw Vx,Wx,Ib");
3918 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3919 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3920 {
3921 /*
3922 * Register, register.
3923 */
3924 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3925 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3926
3927 IEM_MC_BEGIN(3, 0);
3928 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3929 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3930 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3931 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3932 IEM_MC_PREPARE_SSE_USAGE();
3933 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3934 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3935 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
3936 IEM_MC_ADVANCE_RIP();
3937 IEM_MC_END();
3938 }
3939 else
3940 {
3941 /*
3942 * Register, memory.
3943 */
3944 IEM_MC_BEGIN(3, 2);
3945 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3946 IEM_MC_LOCAL(RTUINT128U, uSrc);
3947 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3948 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3949
3950 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1); /* one imm8 byte still to be fetched */
3951 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3952 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3953 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3954 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3955
3956 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3957 IEM_MC_PREPARE_SSE_USAGE();
3958 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3959 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
3960
3961 IEM_MC_ADVANCE_RIP();
3962 IEM_MC_END();
3963 }
3964 return VINF_SUCCESS;
3965}
3966
3967/** Opcode 0xf2 0x0f 0x70 - pshuflw Vx, Wx, Ib */
3968FNIEMOP_DEF(iemOp_pshuflw_Vx_Wx_Ib)
3969{
3970 IEMOP_MNEMONIC(pshuflw_Vx_Wx_Ib, "pshuflw Vx,Wx,Ib");
3971 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3972 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3973 {
3974 /*
3975 * Register, register.
3976 */
3977 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3978 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3979
3980 IEM_MC_BEGIN(3, 0);
3981 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3982 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3983 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3984 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3985 IEM_MC_PREPARE_SSE_USAGE();
3986 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3987 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3988 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
3989 IEM_MC_ADVANCE_RIP();
3990 IEM_MC_END();
3991 }
3992 else
3993 {
3994 /*
3995 * Register, memory.
3996 */
3997 IEM_MC_BEGIN(3, 2);
3998 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3999 IEM_MC_LOCAL(RTUINT128U, uSrc);
4000 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
4001 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4002
4003 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1); /* one imm8 byte still to be fetched */
4004 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
4005 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
4006 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4007 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4008
4009 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4010 IEM_MC_PREPARE_SSE_USAGE();
4011 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4012 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
4013
4014 IEM_MC_ADVANCE_RIP();
4015 IEM_MC_END();
4016 }
4017 return VINF_SUCCESS;
4018}
4019
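/*
 * For reference: pshufd shuffles all four dwords of the source, pshufhw
 * reorders only words 4-7 (copying the low quadword through unchanged), and
 * pshuflw reorders only words 0-3 (leaving the high quadword unchanged);
 * all of them use the same 2-bits-per-element selector scheme as pshufw.
 */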
4020
4021/** Opcode 0x0f 0x71 11/2. */
4022FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Nq_Ib, uint8_t, bRm);
4023
4024/** Opcode 0x66 0x0f 0x71 11/2. */
4025FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Ux_Ib, uint8_t, bRm);
4026
4027/** Opcode 0x0f 0x71 11/4. */
4028FNIEMOP_STUB_1(iemOp_Grp12_psraw_Nq_Ib, uint8_t, bRm);
4029
4030/** Opcode 0x66 0x0f 0x71 11/4. */
4031FNIEMOP_STUB_1(iemOp_Grp12_psraw_Ux_Ib, uint8_t, bRm);
4032
4033/** Opcode 0x0f 0x71 11/6. */
4034FNIEMOP_STUB_1(iemOp_Grp12_psllw_Nq_Ib, uint8_t, bRm);
4035
4036/** Opcode 0x66 0x0f 0x71 11/6. */
4037FNIEMOP_STUB_1(iemOp_Grp12_psllw_Ux_Ib, uint8_t, bRm);
4038
4039
4040/**
4041 * Group 12 jump table for register variant.
4042 */
4043IEM_STATIC const PFNIEMOPRM g_apfnGroup12RegReg[] =
4044{
4045 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4046 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4047 /* /2 */ iemOp_Grp12_psrlw_Nq_Ib, iemOp_Grp12_psrlw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4048 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4049 /* /4 */ iemOp_Grp12_psraw_Nq_Ib, iemOp_Grp12_psraw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4050 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4051 /* /6 */ iemOp_Grp12_psllw_Nq_Ib, iemOp_Grp12_psllw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4052 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
4053};
4054AssertCompile(RT_ELEMENTS(g_apfnGroup12RegReg) == 8*4);
4055
4056
4057/** Opcode 0x0f 0x71. */
4058FNIEMOP_DEF(iemOp_Grp12)
4059{
4060 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4061 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4062 /* register, register */
4063 return FNIEMOP_CALL_1(g_apfnGroup12RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
4064 + pVCpu->iem.s.idxPrefix], bRm);
4065 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
4066}
4067
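/*
 * The Grp12/13/14 RegReg tables hold four entries per /reg row, one per
 * mandatory prefix in the order none, 0x66, 0xf3, 0xf2 (assuming this is
 * what pVCpu->iem.s.idxPrefix encodes); the dispatcher above thus indexes
 * row * 4 + prefix, so e.g. 0x66 0x0f 0x71 /2 selects iemOp_Grp12_psrlw_Ux_Ib.
 */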
4068
4069/** Opcode 0x0f 0x72 11/2. */
4070FNIEMOP_STUB_1(iemOp_Grp13_psrld_Nq_Ib, uint8_t, bRm);
4071
4072/** Opcode 0x66 0x0f 0x72 11/2. */
4073FNIEMOP_STUB_1(iemOp_Grp13_psrld_Ux_Ib, uint8_t, bRm);
4074
4075/** Opcode 0x0f 0x72 11/4. */
4076FNIEMOP_STUB_1(iemOp_Grp13_psrad_Nq_Ib, uint8_t, bRm);
4077
4078/** Opcode 0x66 0x0f 0x72 11/4. */
4079FNIEMOP_STUB_1(iemOp_Grp13_psrad_Ux_Ib, uint8_t, bRm);
4080
4081/** Opcode 0x0f 0x72 11/6. */
4082FNIEMOP_STUB_1(iemOp_Grp13_pslld_Nq_Ib, uint8_t, bRm);
4083
4084/** Opcode 0x66 0x0f 0x72 11/6. */
4085FNIEMOP_STUB_1(iemOp_Grp13_pslld_Ux_Ib, uint8_t, bRm);
4086
4087
4088/**
4089 * Group 13 jump table for register variant.
4090 */
4091IEM_STATIC const PFNIEMOPRM g_apfnGroup13RegReg[] =
4092{
4093 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4094 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4095 /* /2 */ iemOp_Grp13_psrld_Nq_Ib, iemOp_Grp13_psrld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4096 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4097 /* /4 */ iemOp_Grp13_psrad_Nq_Ib, iemOp_Grp13_psrad_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4098 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4099 /* /6 */ iemOp_Grp13_pslld_Nq_Ib, iemOp_Grp13_pslld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4100 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
4101};
4102AssertCompile(RT_ELEMENTS(g_apfnGroup13RegReg) == 8*4);
4103
4104/** Opcode 0x0f 0x72. */
4105FNIEMOP_DEF(iemOp_Grp13)
4106{
4107 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4108 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4109 /* register, register */
4110 return FNIEMOP_CALL_1(g_apfnGroup13RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
4111 + pVCpu->iem.s.idxPrefix], bRm);
4112 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
4113}
4114
4115
4116/** Opcode 0x0f 0x73 11/2. */
4117FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Nq_Ib, uint8_t, bRm);
4118
4119/** Opcode 0x66 0x0f 0x73 11/2. */
4120FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Ux_Ib, uint8_t, bRm);
4121
4122/** Opcode 0x66 0x0f 0x73 11/3. */
4123FNIEMOP_STUB_1(iemOp_Grp14_psrldq_Ux_Ib, uint8_t, bRm); //NEXT
4124
4125/** Opcode 0x0f 0x73 11/6. */
4126FNIEMOP_STUB_1(iemOp_Grp14_psllq_Nq_Ib, uint8_t, bRm);
4127
4128/** Opcode 0x66 0x0f 0x73 11/6. */
4129FNIEMOP_STUB_1(iemOp_Grp14_psllq_Ux_Ib, uint8_t, bRm);
4130
4131/** Opcode 0x66 0x0f 0x73 11/7. */
4132FNIEMOP_STUB_1(iemOp_Grp14_pslldq_Ux_Ib, uint8_t, bRm); //NEXT
4133
4134/**
4135 * Group 14 jump table for register variant.
4136 */
4137IEM_STATIC const PFNIEMOPRM g_apfnGroup14RegReg[] =
4138{
4139 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4140 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4141 /* /2 */ iemOp_Grp14_psrlq_Nq_Ib, iemOp_Grp14_psrlq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4142 /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_psrldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4143 /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4144 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4145 /* /6 */ iemOp_Grp14_psllq_Nq_Ib, iemOp_Grp14_psllq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4146 /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_pslldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4147};
4148AssertCompile(RT_ELEMENTS(g_apfnGroup14RegReg) == 8*4);
4149
4150
4151/** Opcode 0x0f 0x73. */
4152FNIEMOP_DEF(iemOp_Grp14)
4153{
4154 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4155 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4156 /* register, register */
4157 return FNIEMOP_CALL_1(g_apfnGroup14RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
4158 + pVCpu->iem.s.idxPrefix], bRm);
4159 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
4160}
4161
4162
4163/**
4164 * Common worker for MMX instructions on the form:
4165 * pxxx mm1, mm2/mem64
4166 */
4167FNIEMOP_DEF_1(iemOpCommonMmx_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
4168{
4169 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4170 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4171 {
4172 /*
4173 * Register, register.
4174 */
4175 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
4176 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
4177 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4178 IEM_MC_BEGIN(2, 0);
4179 IEM_MC_ARG(uint64_t *, pDst, 0);
4180 IEM_MC_ARG(uint64_t const *, pSrc, 1);
4181 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4182 IEM_MC_PREPARE_FPU_USAGE();
4183 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4184 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
4185 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
4186 IEM_MC_ADVANCE_RIP();
4187 IEM_MC_END();
4188 }
4189 else
4190 {
4191 /*
4192 * Register, memory.
4193 */
4194 IEM_MC_BEGIN(2, 2);
4195 IEM_MC_ARG(uint64_t *, pDst, 0);
4196 IEM_MC_LOCAL(uint64_t, uSrc);
4197 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
4198 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4199
4200 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4201 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4202 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4203 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4204
4205 IEM_MC_PREPARE_FPU_USAGE();
4206 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4207 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
4208
4209 IEM_MC_ADVANCE_RIP();
4210 IEM_MC_END();
4211 }
4212 return VINF_SUCCESS;
4213}
4214
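/*
 * Callers pass a PCIEMOPMEDIAF2 bundle whose pfnU64 member this worker
 * invokes on the two 64-bit MMX operands; the SSE2 sibling below does the
 * same with the pfnU128 member on 128-bit XMM operands. See the
 * pcmpeqb/pcmpeqw/pcmpeqd wrappers further down for typical callers.
 */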
4215
4216/**
4217 * Common worker for SSE2 instructions on the forms:
4218 * pxxx xmm1, xmm2/mem128
4219 *
4220 * Proper alignment of the 128-bit operand is enforced.
4221 * Exceptions type 4. SSE2 cpuid checks.
4222 */
4223FNIEMOP_DEF_1(iemOpCommonSse2_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
4224{
4225 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4226 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4227 {
4228 /*
4229 * Register, register.
4230 */
4231 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4232 IEM_MC_BEGIN(2, 0);
4233 IEM_MC_ARG(PRTUINT128U, pDst, 0);
4234 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
4235 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4236 IEM_MC_PREPARE_SSE_USAGE();
4237 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4238 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4239 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
4240 IEM_MC_ADVANCE_RIP();
4241 IEM_MC_END();
4242 }
4243 else
4244 {
4245 /*
4246 * Register, memory.
4247 */
4248 IEM_MC_BEGIN(2, 2);
4249 IEM_MC_ARG(PRTUINT128U, pDst, 0);
4250 IEM_MC_LOCAL(RTUINT128U, uSrc);
4251 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
4252 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4253
4254 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4255 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4256 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4257 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4258
4259 IEM_MC_PREPARE_SSE_USAGE();
4260 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4261 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
4262
4263 IEM_MC_ADVANCE_RIP();
4264 IEM_MC_END();
4265 }
4266 return VINF_SUCCESS;
4267}
4268
4269
4270/** Opcode 0x0f 0x74 - pcmpeqb Pq, Qq */
4271FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq)
4272{
4273 IEMOP_MNEMONIC(pcmpeqb, "pcmpeqb");
4274 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
4275}
4276
4277/** Opcode 0x66 0x0f 0x74 - pcmpeqb Vx, Wx */
4278FNIEMOP_DEF(iemOp_pcmpeqb_Vx_Wx)
4279{
4280 IEMOP_MNEMONIC(pcmpeqb_Vx_Wx, "pcmpeqb");
4281 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
4282}
4283
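/*
 * A plain-C sketch of the byte-wise compare behind g_iemAImpl_pcmpeqb, shown
 * for a 64-bit MMX lane (illustration only, compiled out; the real code is
 * in the assembly A-impl files and this helper name is made up):
 */
#if 0
static void iemPcmpeqbU64Sketch(uint64_t *puDst, uint64_t const *puSrc)
{
    uint64_t const uDst    = *puDst;
    uint64_t const uSrc    = *puSrc;
    uint64_t       uResult = 0;
    for (unsigned iByte = 0; iByte < 8; iByte++)
        if (((uDst >> (iByte * 8)) & 0xff) == ((uSrc >> (iByte * 8)) & 0xff))
            uResult |= UINT64_C(0xff) << (iByte * 8); /* equal bytes become 0xff, others 0x00 */
    *puDst = uResult;
}
#endif
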
4284/* Opcode 0xf3 0x0f 0x74 - invalid */
4285/* Opcode 0xf2 0x0f 0x74 - invalid */
4286
4287
4288/** Opcode 0x0f 0x75 - pcmpeqw Pq, Qq */
4289FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq)
4290{
4291 IEMOP_MNEMONIC(pcmpeqw, "pcmpeqw");
4292 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
4293}
4294
4295/** Opcode 0x66 0x0f 0x75 - pcmpeqw Vx, Wx */
4296FNIEMOP_DEF(iemOp_pcmpeqw_Vx_Wx)
4297{
4298 IEMOP_MNEMONIC(pcmpeqw_Vx_Wx, "pcmpeqw");
4299 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
4300}
4301
4302/* Opcode 0xf3 0x0f 0x75 - invalid */
4303/* Opcode 0xf2 0x0f 0x75 - invalid */
4304
4305
4306/** Opcode 0x0f 0x76 - pcmpeqd Pq, Qq */
4307FNIEMOP_DEF(iemOp_pcmpeqd_Pq_Qq)
4308{
4309 IEMOP_MNEMONIC(pcmpeqd, "pcmpeqd");
4310 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
4311}
4312
4313/** Opcode 0x66 0x0f 0x76 - pcmpeqd Vx, Wx */
4314FNIEMOP_DEF(iemOp_pcmpeqd_Vx_Wx)
4315{
4316 IEMOP_MNEMONIC(pcmpeqd_Vx_Wx, "pcmpeqd");
4317 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
4318}
4319
4320/* Opcode 0xf3 0x0f 0x76 - invalid */
4321/* Opcode 0xf2 0x0f 0x76 - invalid */
4322
4323
4324/** Opcode 0x0f 0x77 - emms (vex has vzeroall and vzeroupper here) */
4325FNIEMOP_STUB(iemOp_emms);
4326/* Opcode 0x66 0x0f 0x77 - invalid */
4327/* Opcode 0xf3 0x0f 0x77 - invalid */
4328/* Opcode 0xf2 0x0f 0x77 - invalid */
4329
4330/** Opcode 0x0f 0x78 - VMREAD Ey, Gy */
4331FNIEMOP_STUB(iemOp_vmread_Ey_Gy);
4332/* Opcode 0x66 0x0f 0x78 - AMD Group 17 */
4333FNIEMOP_STUB(iemOp_AmdGrp17);
4334/* Opcode 0xf3 0x0f 0x78 - invalid */
4335/* Opcode 0xf2 0x0f 0x78 - invalid */
4336
4337/** Opcode 0x0f 0x79 - VMWRITE Gy, Ey */
4338FNIEMOP_STUB(iemOp_vmwrite_Gy_Ey);
4339/* Opcode 0x66 0x0f 0x79 - invalid */
4340/* Opcode 0xf3 0x0f 0x79 - invalid */
4341/* Opcode 0xf2 0x0f 0x79 - invalid */
4342
4343/* Opcode 0x0f 0x7a - invalid */
4344/* Opcode 0x66 0x0f 0x7a - invalid */
4345/* Opcode 0xf3 0x0f 0x7a - invalid */
4346/* Opcode 0xf2 0x0f 0x7a - invalid */
4347
4348/* Opcode 0x0f 0x7b - invalid */
4349/* Opcode 0x66 0x0f 0x7b - invalid */
4350/* Opcode 0xf3 0x0f 0x7b - invalid */
4351/* Opcode 0xf2 0x0f 0x7b - invalid */
4352
4353/* Opcode 0x0f 0x7c - invalid */
4354/** Opcode 0x66 0x0f 0x7c - haddpd Vpd, Wpd */
4355FNIEMOP_STUB(iemOp_haddpd_Vpd_Wpd);
4356/* Opcode 0xf3 0x0f 0x7c - invalid */
4357/** Opcode 0xf2 0x0f 0x7c - haddps Vps, Wps */
4358FNIEMOP_STUB(iemOp_haddps_Vps_Wps);
4359
4360/* Opcode 0x0f 0x7d - invalid */
4361/** Opcode 0x66 0x0f 0x7d - hsubpd Vpd, Wpd */
4362FNIEMOP_STUB(iemOp_hsubpd_Vpd_Wpd);
4363/* Opcode 0xf3 0x0f 0x7d - invalid */
4364/** Opcode 0xf2 0x0f 0x7d - hsubps Vps, Wps */
4365FNIEMOP_STUB(iemOp_hsubps_Vps_Wps);
4366
4367
4368/** Opcode 0x0f 0x7e - movd_q Ey, Pd */
4369FNIEMOP_DEF(iemOp_movd_q_Ey_Pd)
4370{
4371 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4372 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4373 IEMOP_MNEMONIC(movq_Eq_Pq, "movq Eq,Pq");
4374 else
4375 IEMOP_MNEMONIC(movd_Ed_Pd, "movd Ed,Pd");
4376 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4377 {
4378 /* greg, MMX */
4379 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4380 IEM_MC_BEGIN(0, 1);
4381 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4382 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
4383 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4384 {
4385 IEM_MC_LOCAL(uint64_t, u64Tmp);
4386 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4387 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
4388 }
4389 else
4390 {
4391 IEM_MC_LOCAL(uint32_t, u32Tmp);
4392 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4393 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
4394 }
4395 IEM_MC_ADVANCE_RIP();
4396 IEM_MC_END();
4397 }
4398 else
4399 {
4400 /* [mem], MMX */
4401 IEM_MC_BEGIN(0, 2);
4402 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4403 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4404 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); /* no immediate follows the ModRM bytes */
4405 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4406 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
4407 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4408 {
4409 IEM_MC_LOCAL(uint64_t, u64Tmp);
4410 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4411 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
4412 }
4413 else
4414 {
4415 IEM_MC_LOCAL(uint32_t, u32Tmp);
4416 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4417 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
4418 }
4419 IEM_MC_ADVANCE_RIP();
4420 IEM_MC_END();
4421 }
4422 return VINF_SUCCESS;
4423}
4424
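/*
 * When REX.W is set this encodes movq Eq,Pq and the full 64-bit MMX register
 * is stored; without it only the low 32 bits are stored as movd Ed,Pd. The
 * same REX.W split applies to the 0x66-prefixed XMM variant below.
 */
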
4425/** Opcode 0x66 0x0f 0x7e - movd_q Ey, Vy */
4426FNIEMOP_DEF(iemOp_movd_q_Ey_Vy)
4427{
4428 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4429 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4430 IEMOP_MNEMONIC(movq_Eq_Wq, "movq Eq,Wq");
4431 else
4432 IEMOP_MNEMONIC(movd_Ed_Wd, "movd Ed,Wd");
4433 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4434 {
4435 /* greg, XMM */
4436 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4437 IEM_MC_BEGIN(0, 1);
4438 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4439 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4440 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4441 {
4442 IEM_MC_LOCAL(uint64_t, u64Tmp);
4443 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4444 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
4445 }
4446 else
4447 {
4448 IEM_MC_LOCAL(uint32_t, u32Tmp);
4449 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4450 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
4451 }
4452 IEM_MC_ADVANCE_RIP();
4453 IEM_MC_END();
4454 }
4455 else
4456 {
4457 /* [mem], XMM */
4458 IEM_MC_BEGIN(0, 2);
4459 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4460 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4461 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); /* no immediate follows the ModRM bytes */
4462 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4463 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4464 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4465 {
4466 IEM_MC_LOCAL(uint64_t, u64Tmp);
4467 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4468 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
4469 }
4470 else
4471 {
4472 IEM_MC_LOCAL(uint32_t, u32Tmp);
4473 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4474 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
4475 }
4476 IEM_MC_ADVANCE_RIP();
4477 IEM_MC_END();
4478 }
4479 return VINF_SUCCESS;
4480}
4481
4482
4483/**
4484 * @opcode 0x7e
4485 * @opcodesub !11 mr/reg
4486 * @oppfx 0xf3
4487 * @opcpuid sse2
4488 * @opgroup og_sse2_pcksclr_datamove
4489 * @opxcpttype 5
4490 * @optest op1=1 op2=2 -> op1=2
4491 * @optest op1=0 op2=-42 -> op1=-42
4492 */
4493FNIEMOP_DEF(iemOp_movq_Vq_Wq)
4494{
4495 IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Wq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
4496 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4497 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4498 {
4499 /*
4500 * Register, register.
4501 */
4502 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4503 IEM_MC_BEGIN(0, 2);
4504 IEM_MC_LOCAL(uint64_t, uSrc);
4505
4506 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4507 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4508
4509 IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4510 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
4511
4512 IEM_MC_ADVANCE_RIP();
4513 IEM_MC_END();
4514 }
4515 else
4516 {
4517 /*
4518 * Memory, register.
4519 */
4520 IEM_MC_BEGIN(0, 2);
4521 IEM_MC_LOCAL(uint64_t, uSrc);
4522 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4523
4524 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4525 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4526 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4527 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4528
4529 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4530 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
4531
4532 IEM_MC_ADVANCE_RIP();
4533 IEM_MC_END();
4534 }
4535 return VINF_SUCCESS;
4536}
4537
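/*
 * Note: IEM_MC_STORE_XREG_U64_ZX_U128 writes the low quadword of the
 * destination XMM register and zeroes bits 127:64, matching the
 * architectural zero-extension of movq.
 */
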
4538/* Opcode 0xf2 0x0f 0x7e - invalid */
4539
4540
4541/** Opcode 0x0f 0x7f - movq Qq, Pq */
4542FNIEMOP_DEF(iemOp_movq_Qq_Pq)
4543{
4544 IEMOP_MNEMONIC(movq_Qq_Pq, "movq Qq,Pq");
4545 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4546 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4547 {
4548 /*
4549 * Register, register.
4550 */
4551 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
4552 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
4553 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4554 IEM_MC_BEGIN(0, 1);
4555 IEM_MC_LOCAL(uint64_t, u64Tmp);
4556 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4557 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4558 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4559 IEM_MC_STORE_MREG_U64(bRm & X86_MODRM_RM_MASK, u64Tmp);
4560 IEM_MC_ADVANCE_RIP();
4561 IEM_MC_END();
4562 }
4563 else
4564 {
4565 /*
4566 * Register, memory.
4567 */
4568 IEM_MC_BEGIN(0, 2);
4569 IEM_MC_LOCAL(uint64_t, u64Tmp);
4570 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4571
4572 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4573 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4574 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4575 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
4576
4577 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4578 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
4579
4580 IEM_MC_ADVANCE_RIP();
4581 IEM_MC_END();
4582 }
4583 return VINF_SUCCESS;
4584}
4585
4586/** Opcode 0x66 0x0f 0x7f - movdqa Wx,Vx */
4587FNIEMOP_DEF(iemOp_movdqa_Wx_Vx)
4588{
4589 IEMOP_MNEMONIC(movdqa_Wdq_Vdq, "movdqa Wx,Vx");
4590 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4591 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4592 {
4593 /*
4594 * Register, register.
4595 */
4596 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4597 IEM_MC_BEGIN(0, 0);
4598 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4599 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4600 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
4601 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4602 IEM_MC_ADVANCE_RIP();
4603 IEM_MC_END();
4604 }
4605 else
4606 {
4607 /*
4608 * Register, memory.
4609 */
4610 IEM_MC_BEGIN(0, 2);
4611 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4612 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4613
4614 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4615 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4616 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4617 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4618
4619 IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4620 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
4621
4622 IEM_MC_ADVANCE_RIP();
4623 IEM_MC_END();
4624 }
4625 return VINF_SUCCESS;
4626}
4627
4628/** Opcode 0xf3 0x0f 0x7f - movdqu Wx,Vx */
4629FNIEMOP_DEF(iemOp_movdqu_Wx_Vx)
4630{
4631 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4632 IEMOP_MNEMONIC(movdqu_Wdq_Vdq, "movdqu Wx,Vx");
4633 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4634 {
4635 /*
4636 * Register, register.
4637 */
4638 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4639 IEM_MC_BEGIN(0, 0);
4640 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4641 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4642 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
4643 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4644 IEM_MC_ADVANCE_RIP();
4645 IEM_MC_END();
4646 }
4647 else
4648 {
4649 /*
4650 * Register, memory.
4651 */
4652 IEM_MC_BEGIN(0, 2);
4653 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4654 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4655
4656 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4657 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4658 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4659 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4660
4661 IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4662 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
4663
4664 IEM_MC_ADVANCE_RIP();
4665 IEM_MC_END();
4666 }
4667 return VINF_SUCCESS;
4668}
4669
4670/* Opcode 0xf2 0x0f 0x7f - invalid */
4671
4672
4673
4674/** Opcode 0x0f 0x80. */
4675FNIEMOP_DEF(iemOp_jo_Jv)
4676{
4677 IEMOP_MNEMONIC(jo_Jv, "jo Jv");
4678 IEMOP_HLP_MIN_386();
4679 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4680 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4681 {
4682 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4683 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4684
4685 IEM_MC_BEGIN(0, 0);
4686 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4687 IEM_MC_REL_JMP_S16(i16Imm);
4688 } IEM_MC_ELSE() {
4689 IEM_MC_ADVANCE_RIP();
4690 } IEM_MC_ENDIF();
4691 IEM_MC_END();
4692 }
4693 else
4694 {
4695 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4696 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4697
4698 IEM_MC_BEGIN(0, 0);
4699 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4700 IEM_MC_REL_JMP_S32(i32Imm);
4701 } IEM_MC_ELSE() {
4702 IEM_MC_ADVANCE_RIP();
4703 } IEM_MC_ENDIF();
4704 IEM_MC_END();
4705 }
4706 return VINF_SUCCESS;
4707}
4708
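/*
 * The long-form Jcc handlers (0x0f 0x80 thru 0x8f) below all follow the
 * pattern above: fetch a sign-extended 16- or 32-bit displacement (the
 * operand size defaults to 64-bit in long mode, see
 * IEMOP_HLP_DEFAULT_64BIT_OP_SIZE) and branch on an EFLAGS predicate; only
 * the predicate differs, e.g. jl/jge test SF != OF via IEM_MC_IF_EFL_BITS_NE
 * and jle/jg additionally check ZF via IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE.
 */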
4709
4710/** Opcode 0x0f 0x81. */
4711FNIEMOP_DEF(iemOp_jno_Jv)
4712{
4713 IEMOP_MNEMONIC(jno_Jv, "jno Jv");
4714 IEMOP_HLP_MIN_386();
4715 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4716 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4717 {
4718 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4719 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4720
4721 IEM_MC_BEGIN(0, 0);
4722 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4723 IEM_MC_ADVANCE_RIP();
4724 } IEM_MC_ELSE() {
4725 IEM_MC_REL_JMP_S16(i16Imm);
4726 } IEM_MC_ENDIF();
4727 IEM_MC_END();
4728 }
4729 else
4730 {
4731 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4732 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4733
4734 IEM_MC_BEGIN(0, 0);
4735 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4736 IEM_MC_ADVANCE_RIP();
4737 } IEM_MC_ELSE() {
4738 IEM_MC_REL_JMP_S32(i32Imm);
4739 } IEM_MC_ENDIF();
4740 IEM_MC_END();
4741 }
4742 return VINF_SUCCESS;
4743}
4744
4745
4746/** Opcode 0x0f 0x82. */
4747FNIEMOP_DEF(iemOp_jc_Jv)
4748{
4749 IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
4750 IEMOP_HLP_MIN_386();
4751 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4752 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4753 {
4754 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4755 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4756
4757 IEM_MC_BEGIN(0, 0);
4758 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4759 IEM_MC_REL_JMP_S16(i16Imm);
4760 } IEM_MC_ELSE() {
4761 IEM_MC_ADVANCE_RIP();
4762 } IEM_MC_ENDIF();
4763 IEM_MC_END();
4764 }
4765 else
4766 {
4767 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4768 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4769
4770 IEM_MC_BEGIN(0, 0);
4771 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4772 IEM_MC_REL_JMP_S32(i32Imm);
4773 } IEM_MC_ELSE() {
4774 IEM_MC_ADVANCE_RIP();
4775 } IEM_MC_ENDIF();
4776 IEM_MC_END();
4777 }
4778 return VINF_SUCCESS;
4779}
4780
4781
4782/** Opcode 0x0f 0x83. */
4783FNIEMOP_DEF(iemOp_jnc_Jv)
4784{
4785 IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
4786 IEMOP_HLP_MIN_386();
4787 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4788 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4789 {
4790 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4791 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4792
4793 IEM_MC_BEGIN(0, 0);
4794 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4795 IEM_MC_ADVANCE_RIP();
4796 } IEM_MC_ELSE() {
4797 IEM_MC_REL_JMP_S16(i16Imm);
4798 } IEM_MC_ENDIF();
4799 IEM_MC_END();
4800 }
4801 else
4802 {
4803 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4804 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4805
4806 IEM_MC_BEGIN(0, 0);
4807 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4808 IEM_MC_ADVANCE_RIP();
4809 } IEM_MC_ELSE() {
4810 IEM_MC_REL_JMP_S32(i32Imm);
4811 } IEM_MC_ENDIF();
4812 IEM_MC_END();
4813 }
4814 return VINF_SUCCESS;
4815}
4816
4817
4818/** Opcode 0x0f 0x84. */
4819FNIEMOP_DEF(iemOp_je_Jv)
4820{
4821 IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
4822 IEMOP_HLP_MIN_386();
4823 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4824 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4825 {
4826 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4827 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4828
4829 IEM_MC_BEGIN(0, 0);
4830 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4831 IEM_MC_REL_JMP_S16(i16Imm);
4832 } IEM_MC_ELSE() {
4833 IEM_MC_ADVANCE_RIP();
4834 } IEM_MC_ENDIF();
4835 IEM_MC_END();
4836 }
4837 else
4838 {
4839 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4840 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4841
4842 IEM_MC_BEGIN(0, 0);
4843 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4844 IEM_MC_REL_JMP_S32(i32Imm);
4845 } IEM_MC_ELSE() {
4846 IEM_MC_ADVANCE_RIP();
4847 } IEM_MC_ENDIF();
4848 IEM_MC_END();
4849 }
4850 return VINF_SUCCESS;
4851}
4852
4853
4854/** Opcode 0x0f 0x85. */
4855FNIEMOP_DEF(iemOp_jne_Jv)
4856{
4857 IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
4858 IEMOP_HLP_MIN_386();
4859 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4860 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4861 {
4862 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4863 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4864
4865 IEM_MC_BEGIN(0, 0);
4866 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4867 IEM_MC_ADVANCE_RIP();
4868 } IEM_MC_ELSE() {
4869 IEM_MC_REL_JMP_S16(i16Imm);
4870 } IEM_MC_ENDIF();
4871 IEM_MC_END();
4872 }
4873 else
4874 {
4875 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4876 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4877
4878 IEM_MC_BEGIN(0, 0);
4879 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4880 IEM_MC_ADVANCE_RIP();
4881 } IEM_MC_ELSE() {
4882 IEM_MC_REL_JMP_S32(i32Imm);
4883 } IEM_MC_ENDIF();
4884 IEM_MC_END();
4885 }
4886 return VINF_SUCCESS;
4887}
4888
4889
4890/** Opcode 0x0f 0x86. */
4891FNIEMOP_DEF(iemOp_jbe_Jv)
4892{
4893 IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
4894 IEMOP_HLP_MIN_386();
4895 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4896 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4897 {
4898 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4899 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4900
4901 IEM_MC_BEGIN(0, 0);
4902 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4903 IEM_MC_REL_JMP_S16(i16Imm);
4904 } IEM_MC_ELSE() {
4905 IEM_MC_ADVANCE_RIP();
4906 } IEM_MC_ENDIF();
4907 IEM_MC_END();
4908 }
4909 else
4910 {
4911 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4912 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4913
4914 IEM_MC_BEGIN(0, 0);
4915 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4916 IEM_MC_REL_JMP_S32(i32Imm);
4917 } IEM_MC_ELSE() {
4918 IEM_MC_ADVANCE_RIP();
4919 } IEM_MC_ENDIF();
4920 IEM_MC_END();
4921 }
4922 return VINF_SUCCESS;
4923}
4924
4925
4926/** Opcode 0x0f 0x87. */
4927FNIEMOP_DEF(iemOp_jnbe_Jv)
4928{
4929 IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
4930 IEMOP_HLP_MIN_386();
4931 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4932 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4933 {
4934 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4935 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4936
4937 IEM_MC_BEGIN(0, 0);
4938 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4939 IEM_MC_ADVANCE_RIP();
4940 } IEM_MC_ELSE() {
4941 IEM_MC_REL_JMP_S16(i16Imm);
4942 } IEM_MC_ENDIF();
4943 IEM_MC_END();
4944 }
4945 else
4946 {
4947 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4948 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4949
4950 IEM_MC_BEGIN(0, 0);
4951 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4952 IEM_MC_ADVANCE_RIP();
4953 } IEM_MC_ELSE() {
4954 IEM_MC_REL_JMP_S32(i32Imm);
4955 } IEM_MC_ENDIF();
4956 IEM_MC_END();
4957 }
4958 return VINF_SUCCESS;
4959}
4960
4961
4962/** Opcode 0x0f 0x88. */
4963FNIEMOP_DEF(iemOp_js_Jv)
4964{
4965 IEMOP_MNEMONIC(js_Jv, "js Jv");
4966 IEMOP_HLP_MIN_386();
4967 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4968 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4969 {
4970 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4971 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4972
4973 IEM_MC_BEGIN(0, 0);
4974 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4975 IEM_MC_REL_JMP_S16(i16Imm);
4976 } IEM_MC_ELSE() {
4977 IEM_MC_ADVANCE_RIP();
4978 } IEM_MC_ENDIF();
4979 IEM_MC_END();
4980 }
4981 else
4982 {
4983 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4984 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4985
4986 IEM_MC_BEGIN(0, 0);
4987 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4988 IEM_MC_REL_JMP_S32(i32Imm);
4989 } IEM_MC_ELSE() {
4990 IEM_MC_ADVANCE_RIP();
4991 } IEM_MC_ENDIF();
4992 IEM_MC_END();
4993 }
4994 return VINF_SUCCESS;
4995}
4996
4997
4998/** Opcode 0x0f 0x89. */
4999FNIEMOP_DEF(iemOp_jns_Jv)
5000{
5001 IEMOP_MNEMONIC(jns_Jv, "jns Jv");
5002 IEMOP_HLP_MIN_386();
5003 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5004 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5005 {
5006 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5007 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5008
5009 IEM_MC_BEGIN(0, 0);
5010 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5011 IEM_MC_ADVANCE_RIP();
5012 } IEM_MC_ELSE() {
5013 IEM_MC_REL_JMP_S16(i16Imm);
5014 } IEM_MC_ENDIF();
5015 IEM_MC_END();
5016 }
5017 else
5018 {
5019 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5020 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5021
5022 IEM_MC_BEGIN(0, 0);
5023 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5024 IEM_MC_ADVANCE_RIP();
5025 } IEM_MC_ELSE() {
5026 IEM_MC_REL_JMP_S32(i32Imm);
5027 } IEM_MC_ENDIF();
5028 IEM_MC_END();
5029 }
5030 return VINF_SUCCESS;
5031}
5032
5033
5034/** Opcode 0x0f 0x8a. */
5035FNIEMOP_DEF(iemOp_jp_Jv)
5036{
5037 IEMOP_MNEMONIC(jp_Jv, "jp Jv");
5038 IEMOP_HLP_MIN_386();
5039 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5040 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5041 {
5042 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5043 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5044
5045 IEM_MC_BEGIN(0, 0);
5046 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5047 IEM_MC_REL_JMP_S16(i16Imm);
5048 } IEM_MC_ELSE() {
5049 IEM_MC_ADVANCE_RIP();
5050 } IEM_MC_ENDIF();
5051 IEM_MC_END();
5052 }
5053 else
5054 {
5055 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5056 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5057
5058 IEM_MC_BEGIN(0, 0);
5059 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5060 IEM_MC_REL_JMP_S32(i32Imm);
5061 } IEM_MC_ELSE() {
5062 IEM_MC_ADVANCE_RIP();
5063 } IEM_MC_ENDIF();
5064 IEM_MC_END();
5065 }
5066 return VINF_SUCCESS;
5067}
5068
5069
5070/** Opcode 0x0f 0x8b. */
5071FNIEMOP_DEF(iemOp_jnp_Jv)
5072{
5073 IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
5074 IEMOP_HLP_MIN_386();
5075 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5076 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5077 {
5078 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5079 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5080
5081 IEM_MC_BEGIN(0, 0);
5082 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5083 IEM_MC_ADVANCE_RIP();
5084 } IEM_MC_ELSE() {
5085 IEM_MC_REL_JMP_S16(i16Imm);
5086 } IEM_MC_ENDIF();
5087 IEM_MC_END();
5088 }
5089 else
5090 {
5091 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5092 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5093
5094 IEM_MC_BEGIN(0, 0);
5095 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5096 IEM_MC_ADVANCE_RIP();
5097 } IEM_MC_ELSE() {
5098 IEM_MC_REL_JMP_S32(i32Imm);
5099 } IEM_MC_ENDIF();
5100 IEM_MC_END();
5101 }
5102 return VINF_SUCCESS;
5103}
5104
5105
5106/** Opcode 0x0f 0x8c. */
5107FNIEMOP_DEF(iemOp_jl_Jv)
5108{
5109 IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
5110 IEMOP_HLP_MIN_386();
5111 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5112 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5113 {
5114 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5115 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5116
5117 IEM_MC_BEGIN(0, 0);
5118 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5119 IEM_MC_REL_JMP_S16(i16Imm);
5120 } IEM_MC_ELSE() {
5121 IEM_MC_ADVANCE_RIP();
5122 } IEM_MC_ENDIF();
5123 IEM_MC_END();
5124 }
5125 else
5126 {
5127 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5128 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5129
5130 IEM_MC_BEGIN(0, 0);
5131 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5132 IEM_MC_REL_JMP_S32(i32Imm);
5133 } IEM_MC_ELSE() {
5134 IEM_MC_ADVANCE_RIP();
5135 } IEM_MC_ENDIF();
5136 IEM_MC_END();
5137 }
5138 return VINF_SUCCESS;
5139}
5140
5141
5142/** Opcode 0x0f 0x8d. */
5143FNIEMOP_DEF(iemOp_jnl_Jv)
5144{
5145 IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
5146 IEMOP_HLP_MIN_386();
5147 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5148 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5149 {
5150 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5151 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5152
5153 IEM_MC_BEGIN(0, 0);
5154 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5155 IEM_MC_ADVANCE_RIP();
5156 } IEM_MC_ELSE() {
5157 IEM_MC_REL_JMP_S16(i16Imm);
5158 } IEM_MC_ENDIF();
5159 IEM_MC_END();
5160 }
5161 else
5162 {
5163 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5164 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5165
5166 IEM_MC_BEGIN(0, 0);
5167 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5168 IEM_MC_ADVANCE_RIP();
5169 } IEM_MC_ELSE() {
5170 IEM_MC_REL_JMP_S32(i32Imm);
5171 } IEM_MC_ENDIF();
5172 IEM_MC_END();
5173 }
5174 return VINF_SUCCESS;
5175}
5176
5177
5178/** Opcode 0x0f 0x8e. */
5179FNIEMOP_DEF(iemOp_jle_Jv)
5180{
5181 IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
5182 IEMOP_HLP_MIN_386();
5183 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5184 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5185 {
5186 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5187 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5188
5189 IEM_MC_BEGIN(0, 0);
5190 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5191 IEM_MC_REL_JMP_S16(i16Imm);
5192 } IEM_MC_ELSE() {
5193 IEM_MC_ADVANCE_RIP();
5194 } IEM_MC_ENDIF();
5195 IEM_MC_END();
5196 }
5197 else
5198 {
5199 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5200 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5201
5202 IEM_MC_BEGIN(0, 0);
5203 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5204 IEM_MC_REL_JMP_S32(i32Imm);
5205 } IEM_MC_ELSE() {
5206 IEM_MC_ADVANCE_RIP();
5207 } IEM_MC_ENDIF();
5208 IEM_MC_END();
5209 }
5210 return VINF_SUCCESS;
5211}
5212
5213
5214/** Opcode 0x0f 0x8f. */
5215FNIEMOP_DEF(iemOp_jnle_Jv)
5216{
5217 IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
5218 IEMOP_HLP_MIN_386();
5219 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5220 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5221 {
5222 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5223 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5224
5225 IEM_MC_BEGIN(0, 0);
5226 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5227 IEM_MC_ADVANCE_RIP();
5228 } IEM_MC_ELSE() {
5229 IEM_MC_REL_JMP_S16(i16Imm);
5230 } IEM_MC_ENDIF();
5231 IEM_MC_END();
5232 }
5233 else
5234 {
5235 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5236 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5237
5238 IEM_MC_BEGIN(0, 0);
5239 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5240 IEM_MC_ADVANCE_RIP();
5241 } IEM_MC_ELSE() {
5242 IEM_MC_REL_JMP_S32(i32Imm);
5243 } IEM_MC_ENDIF();
5244 IEM_MC_END();
5245 }
5246 return VINF_SUCCESS;
5247}
5248
5249
5250/** Opcode 0x0f 0x90. */
5251FNIEMOP_DEF(iemOp_seto_Eb)
5252{
5253 IEMOP_MNEMONIC(seto_Eb, "seto Eb");
5254 IEMOP_HLP_MIN_386();
5255 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5256
5257 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5258 * any way. AMD says it's "unused", whatever that means. We're
5259 * ignoring for now. */
5260 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5261 {
5262 /* register target */
5263 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5264 IEM_MC_BEGIN(0, 0);
5265 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5266 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5267 } IEM_MC_ELSE() {
5268 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5269 } IEM_MC_ENDIF();
5270 IEM_MC_ADVANCE_RIP();
5271 IEM_MC_END();
5272 }
5273 else
5274 {
5275 /* memory target */
5276 IEM_MC_BEGIN(0, 1);
5277 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5278 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5279 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5280 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5281 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5282 } IEM_MC_ELSE() {
5283 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5284 } IEM_MC_ENDIF();
5285 IEM_MC_ADVANCE_RIP();
5286 IEM_MC_END();
5287 }
5288 return VINF_SUCCESS;
5289}
5290
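/*
 * The remaining SETcc handlers repeat this template, storing a single 0 or 1
 * byte according to the same EFLAGS predicates the Jcc forms above use; the
 * ModRM reg field does not select anything (see the @todo encoding-test note).
 */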
5291
5292/** Opcode 0x0f 0x91. */
5293FNIEMOP_DEF(iemOp_setno_Eb)
5294{
5295 IEMOP_MNEMONIC(setno_Eb, "setno Eb");
5296 IEMOP_HLP_MIN_386();
5297 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5298
5299 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5300 * any way. AMD says it's "unused", whatever that means. We're
5301 * ignoring for now. */
5302 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5303 {
5304 /* register target */
5305 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5306 IEM_MC_BEGIN(0, 0);
5307 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5308 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5309 } IEM_MC_ELSE() {
5310 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5311 } IEM_MC_ENDIF();
5312 IEM_MC_ADVANCE_RIP();
5313 IEM_MC_END();
5314 }
5315 else
5316 {
5317 /* memory target */
5318 IEM_MC_BEGIN(0, 1);
5319 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5320 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5321 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5322 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5323 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5324 } IEM_MC_ELSE() {
5325 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5326 } IEM_MC_ENDIF();
5327 IEM_MC_ADVANCE_RIP();
5328 IEM_MC_END();
5329 }
5330 return VINF_SUCCESS;
5331}
5332
5333
5334/** Opcode 0x0f 0x92. */
5335FNIEMOP_DEF(iemOp_setc_Eb)
5336{
5337 IEMOP_MNEMONIC(setc_Eb, "setc Eb");
5338 IEMOP_HLP_MIN_386();
5339 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5340
5341 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5342 * any way. AMD says it's "unused", whatever that means. We're
5343 * ignoring for now. */
5344 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5345 {
5346 /* register target */
5347 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5348 IEM_MC_BEGIN(0, 0);
5349 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5350 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5351 } IEM_MC_ELSE() {
5352 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5353 } IEM_MC_ENDIF();
5354 IEM_MC_ADVANCE_RIP();
5355 IEM_MC_END();
5356 }
5357 else
5358 {
5359 /* memory target */
5360 IEM_MC_BEGIN(0, 1);
5361 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5362 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5363 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5364 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5365 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5366 } IEM_MC_ELSE() {
5367 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5368 } IEM_MC_ENDIF();
5369 IEM_MC_ADVANCE_RIP();
5370 IEM_MC_END();
5371 }
5372 return VINF_SUCCESS;
5373}
5374
5375
5376/** Opcode 0x0f 0x93. */
5377FNIEMOP_DEF(iemOp_setnc_Eb)
5378{
5379 IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
5380 IEMOP_HLP_MIN_386();
5381 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5382
5383 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5384 * any way. AMD says it's "unused", whatever that means. We're
5385 * ignoring for now. */
5386 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5387 {
5388 /* register target */
5389 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5390 IEM_MC_BEGIN(0, 0);
5391 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5392 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5393 } IEM_MC_ELSE() {
5394 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5395 } IEM_MC_ENDIF();
5396 IEM_MC_ADVANCE_RIP();
5397 IEM_MC_END();
5398 }
5399 else
5400 {
5401 /* memory target */
5402 IEM_MC_BEGIN(0, 1);
5403 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5404 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5405 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5406 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5407 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5408 } IEM_MC_ELSE() {
5409 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5410 } IEM_MC_ENDIF();
5411 IEM_MC_ADVANCE_RIP();
5412 IEM_MC_END();
5413 }
5414 return VINF_SUCCESS;
5415}
5416
5417
5418/** Opcode 0x0f 0x94. */
5419FNIEMOP_DEF(iemOp_sete_Eb)
5420{
5421 IEMOP_MNEMONIC(sete_Eb, "sete Eb");
5422 IEMOP_HLP_MIN_386();
5423 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5424
5425 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5426 * any way. AMD says it's "unused", whatever that means. We're
5427 * ignoring for now. */
5428 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5429 {
5430 /* register target */
5431 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5432 IEM_MC_BEGIN(0, 0);
5433 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5434 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5435 } IEM_MC_ELSE() {
5436 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5437 } IEM_MC_ENDIF();
5438 IEM_MC_ADVANCE_RIP();
5439 IEM_MC_END();
5440 }
5441 else
5442 {
5443 /* memory target */
5444 IEM_MC_BEGIN(0, 1);
5445 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5446 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5447 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5448 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5449 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5450 } IEM_MC_ELSE() {
5451 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5452 } IEM_MC_ENDIF();
5453 IEM_MC_ADVANCE_RIP();
5454 IEM_MC_END();
5455 }
5456 return VINF_SUCCESS;
5457}
5458
5459
5460/** Opcode 0x0f 0x95. */
5461FNIEMOP_DEF(iemOp_setne_Eb)
5462{
5463 IEMOP_MNEMONIC(setne_Eb, "setne Eb");
5464 IEMOP_HLP_MIN_386();
5465 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5466
5467 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5468 * any way. AMD says it's "unused", whatever that means. We're
5469 * ignoring for now. */
5470 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5471 {
5472 /* register target */
5473 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5474 IEM_MC_BEGIN(0, 0);
5475 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5476 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5477 } IEM_MC_ELSE() {
5478 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5479 } IEM_MC_ENDIF();
5480 IEM_MC_ADVANCE_RIP();
5481 IEM_MC_END();
5482 }
5483 else
5484 {
5485 /* memory target */
5486 IEM_MC_BEGIN(0, 1);
5487 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5488 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5489 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5490 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5491 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5492 } IEM_MC_ELSE() {
5493 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5494 } IEM_MC_ENDIF();
5495 IEM_MC_ADVANCE_RIP();
5496 IEM_MC_END();
5497 }
5498 return VINF_SUCCESS;
5499}
5500
5501
5502/** Opcode 0x0f 0x96. */
5503FNIEMOP_DEF(iemOp_setbe_Eb)
5504{
5505 IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
5506 IEMOP_HLP_MIN_386();
5507 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5508
5509 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5510 * any way. AMD says it's "unused", whatever that means. We're
5511 * ignoring for now. */
5512 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5513 {
5514 /* register target */
5515 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5516 IEM_MC_BEGIN(0, 0);
5517 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5518 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5519 } IEM_MC_ELSE() {
5520 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5521 } IEM_MC_ENDIF();
5522 IEM_MC_ADVANCE_RIP();
5523 IEM_MC_END();
5524 }
5525 else
5526 {
5527 /* memory target */
5528 IEM_MC_BEGIN(0, 1);
5529 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5530 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5531 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5532 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5533 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5534 } IEM_MC_ELSE() {
5535 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5536 } IEM_MC_ENDIF();
5537 IEM_MC_ADVANCE_RIP();
5538 IEM_MC_END();
5539 }
5540 return VINF_SUCCESS;
5541}
5542
5543
5544/** Opcode 0x0f 0x97. */
5545FNIEMOP_DEF(iemOp_setnbe_Eb)
5546{
5547 IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
5548 IEMOP_HLP_MIN_386();
5549 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5550
5551 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5552 * any way. AMD says it's "unused", whatever that means. We're
5553     *        ignoring it for now. */
5554 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5555 {
5556 /* register target */
5557 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5558 IEM_MC_BEGIN(0, 0);
5559 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5560 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5561 } IEM_MC_ELSE() {
5562 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5563 } IEM_MC_ENDIF();
5564 IEM_MC_ADVANCE_RIP();
5565 IEM_MC_END();
5566 }
5567 else
5568 {
5569 /* memory target */
5570 IEM_MC_BEGIN(0, 1);
5571 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5572 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5573 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5574 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5575 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5576 } IEM_MC_ELSE() {
5577 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5578 } IEM_MC_ENDIF();
5579 IEM_MC_ADVANCE_RIP();
5580 IEM_MC_END();
5581 }
5582 return VINF_SUCCESS;
5583}
5584
5585
5586/** Opcode 0x0f 0x98. */
5587FNIEMOP_DEF(iemOp_sets_Eb)
5588{
5589 IEMOP_MNEMONIC(sets_Eb, "sets Eb");
5590 IEMOP_HLP_MIN_386();
5591 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5592
5593 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5594 * any way. AMD says it's "unused", whatever that means. We're
5595     *        ignoring it for now. */
5596 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5597 {
5598 /* register target */
5599 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5600 IEM_MC_BEGIN(0, 0);
5601 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5602 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5603 } IEM_MC_ELSE() {
5604 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5605 } IEM_MC_ENDIF();
5606 IEM_MC_ADVANCE_RIP();
5607 IEM_MC_END();
5608 }
5609 else
5610 {
5611 /* memory target */
5612 IEM_MC_BEGIN(0, 1);
5613 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5614 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5615 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5616 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5617 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5618 } IEM_MC_ELSE() {
5619 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5620 } IEM_MC_ENDIF();
5621 IEM_MC_ADVANCE_RIP();
5622 IEM_MC_END();
5623 }
5624 return VINF_SUCCESS;
5625}
5626
5627
5628/** Opcode 0x0f 0x99. */
5629FNIEMOP_DEF(iemOp_setns_Eb)
5630{
5631 IEMOP_MNEMONIC(setns_Eb, "setns Eb");
5632 IEMOP_HLP_MIN_386();
5633 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5634
5635 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5636 * any way. AMD says it's "unused", whatever that means. We're
5637     *        ignoring it for now. */
5638 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5639 {
5640 /* register target */
5641 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5642 IEM_MC_BEGIN(0, 0);
5643 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5644 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5645 } IEM_MC_ELSE() {
5646 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5647 } IEM_MC_ENDIF();
5648 IEM_MC_ADVANCE_RIP();
5649 IEM_MC_END();
5650 }
5651 else
5652 {
5653 /* memory target */
5654 IEM_MC_BEGIN(0, 1);
5655 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5656 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5657 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5658 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5659 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5660 } IEM_MC_ELSE() {
5661 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5662 } IEM_MC_ENDIF();
5663 IEM_MC_ADVANCE_RIP();
5664 IEM_MC_END();
5665 }
5666 return VINF_SUCCESS;
5667}
5668
5669
5670/** Opcode 0x0f 0x9a. */
5671FNIEMOP_DEF(iemOp_setp_Eb)
5672{
5673 IEMOP_MNEMONIC(setp_Eb, "setp Eb");
5674 IEMOP_HLP_MIN_386();
5675 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5676
5677 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5678 * any way. AMD says it's "unused", whatever that means. We're
5679     *        ignoring it for now. */
5680 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5681 {
5682 /* register target */
5683 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5684 IEM_MC_BEGIN(0, 0);
5685 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5686 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5687 } IEM_MC_ELSE() {
5688 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5689 } IEM_MC_ENDIF();
5690 IEM_MC_ADVANCE_RIP();
5691 IEM_MC_END();
5692 }
5693 else
5694 {
5695 /* memory target */
5696 IEM_MC_BEGIN(0, 1);
5697 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5698 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5699 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5700 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5701 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5702 } IEM_MC_ELSE() {
5703 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5704 } IEM_MC_ENDIF();
5705 IEM_MC_ADVANCE_RIP();
5706 IEM_MC_END();
5707 }
5708 return VINF_SUCCESS;
5709}
5710
5711
5712/** Opcode 0x0f 0x9b. */
5713FNIEMOP_DEF(iemOp_setnp_Eb)
5714{
5715 IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
5716 IEMOP_HLP_MIN_386();
5717 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5718
5719 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5720 * any way. AMD says it's "unused", whatever that means. We're
5721     *        ignoring it for now. */
5722 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5723 {
5724 /* register target */
5725 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5726 IEM_MC_BEGIN(0, 0);
5727 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5728 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5729 } IEM_MC_ELSE() {
5730 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5731 } IEM_MC_ENDIF();
5732 IEM_MC_ADVANCE_RIP();
5733 IEM_MC_END();
5734 }
5735 else
5736 {
5737 /* memory target */
5738 IEM_MC_BEGIN(0, 1);
5739 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5740 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5741 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5742 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5743 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5744 } IEM_MC_ELSE() {
5745 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5746 } IEM_MC_ENDIF();
5747 IEM_MC_ADVANCE_RIP();
5748 IEM_MC_END();
5749 }
5750 return VINF_SUCCESS;
5751}
5752
5753
5754/** Opcode 0x0f 0x9c. */
5755FNIEMOP_DEF(iemOp_setl_Eb)
5756{
5757 IEMOP_MNEMONIC(setl_Eb, "setl Eb");
5758 IEMOP_HLP_MIN_386();
5759 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5760
5761 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5762 * any way. AMD says it's "unused", whatever that means. We're
5763     *        ignoring it for now. */
5764 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5765 {
5766 /* register target */
5767 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5768 IEM_MC_BEGIN(0, 0);
5769 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5770 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5771 } IEM_MC_ELSE() {
5772 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5773 } IEM_MC_ENDIF();
5774 IEM_MC_ADVANCE_RIP();
5775 IEM_MC_END();
5776 }
5777 else
5778 {
5779 /* memory target */
5780 IEM_MC_BEGIN(0, 1);
5781 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5782 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5783 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5784 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5785 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5786 } IEM_MC_ELSE() {
5787 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5788 } IEM_MC_ENDIF();
5789 IEM_MC_ADVANCE_RIP();
5790 IEM_MC_END();
5791 }
5792 return VINF_SUCCESS;
5793}
5794
5795
5796/** Opcode 0x0f 0x9d. */
5797FNIEMOP_DEF(iemOp_setnl_Eb)
5798{
5799 IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
5800 IEMOP_HLP_MIN_386();
5801 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5802
5803 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5804 * any way. AMD says it's "unused", whatever that means. We're
5805     *        ignoring it for now. */
5806 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5807 {
5808 /* register target */
5809 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5810 IEM_MC_BEGIN(0, 0);
5811 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5812 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5813 } IEM_MC_ELSE() {
5814 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5815 } IEM_MC_ENDIF();
5816 IEM_MC_ADVANCE_RIP();
5817 IEM_MC_END();
5818 }
5819 else
5820 {
5821 /* memory target */
5822 IEM_MC_BEGIN(0, 1);
5823 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5824 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5825 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5826 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5827 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5828 } IEM_MC_ELSE() {
5829 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5830 } IEM_MC_ENDIF();
5831 IEM_MC_ADVANCE_RIP();
5832 IEM_MC_END();
5833 }
5834 return VINF_SUCCESS;
5835}
5836
5837
5838/** Opcode 0x0f 0x9e. */
5839FNIEMOP_DEF(iemOp_setle_Eb)
5840{
5841 IEMOP_MNEMONIC(setle_Eb, "setle Eb");
5842 IEMOP_HLP_MIN_386();
5843 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5844
5845 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5846 * any way. AMD says it's "unused", whatever that means. We're
5847     *        ignoring it for now. */
5848 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5849 {
5850 /* register target */
5851 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5852 IEM_MC_BEGIN(0, 0);
5853 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5854 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5855 } IEM_MC_ELSE() {
5856 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5857 } IEM_MC_ENDIF();
5858 IEM_MC_ADVANCE_RIP();
5859 IEM_MC_END();
5860 }
5861 else
5862 {
5863 /* memory target */
5864 IEM_MC_BEGIN(0, 1);
5865 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5866 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5867 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5868 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5869 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5870 } IEM_MC_ELSE() {
5871 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5872 } IEM_MC_ENDIF();
5873 IEM_MC_ADVANCE_RIP();
5874 IEM_MC_END();
5875 }
5876 return VINF_SUCCESS;
5877}
5878
5879
5880/** Opcode 0x0f 0x9f. */
5881FNIEMOP_DEF(iemOp_setnle_Eb)
5882{
5883 IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
5884 IEMOP_HLP_MIN_386();
5885 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5886
5887 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5888 * any way. AMD says it's "unused", whatever that means. We're
5889     *        ignoring it for now. */
5890 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5891 {
5892 /* register target */
5893 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5894 IEM_MC_BEGIN(0, 0);
5895 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5896 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5897 } IEM_MC_ELSE() {
5898 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5899 } IEM_MC_ENDIF();
5900 IEM_MC_ADVANCE_RIP();
5901 IEM_MC_END();
5902 }
5903 else
5904 {
5905 /* memory target */
5906 IEM_MC_BEGIN(0, 1);
5907 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5908 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5909 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5910 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5911 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5912 } IEM_MC_ELSE() {
5913 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5914 } IEM_MC_ENDIF();
5915 IEM_MC_ADVANCE_RIP();
5916 IEM_MC_END();
5917 }
5918 return VINF_SUCCESS;
5919}
5920
5921
5922/**
5923 * Common 'push segment-register' helper.
5924 */
5925FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
5926{
5927 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5928    Assert(iReg >= X86_SREG_FS || pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT); /* Only FS and GS pushes are valid in 64-bit mode. */
5929 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5930
5931 switch (pVCpu->iem.s.enmEffOpSize)
5932 {
5933 case IEMMODE_16BIT:
5934 IEM_MC_BEGIN(0, 1);
5935 IEM_MC_LOCAL(uint16_t, u16Value);
5936 IEM_MC_FETCH_SREG_U16(u16Value, iReg);
5937 IEM_MC_PUSH_U16(u16Value);
5938 IEM_MC_ADVANCE_RIP();
5939 IEM_MC_END();
5940 break;
5941
5942 case IEMMODE_32BIT:
5943 IEM_MC_BEGIN(0, 1);
5944 IEM_MC_LOCAL(uint32_t, u32Value);
5945 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
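            /* Note: the dedicated sreg push mirrors hardware, which only
               writes the 16-bit selector into the 32-bit stack slot. */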
5946 IEM_MC_PUSH_U32_SREG(u32Value);
5947 IEM_MC_ADVANCE_RIP();
5948 IEM_MC_END();
5949 break;
5950
5951 case IEMMODE_64BIT:
5952 IEM_MC_BEGIN(0, 1);
5953 IEM_MC_LOCAL(uint64_t, u64Value);
5954 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
5955 IEM_MC_PUSH_U64(u64Value);
5956 IEM_MC_ADVANCE_RIP();
5957 IEM_MC_END();
5958 break;
5959 }
5960
5961 return VINF_SUCCESS;
5962}
5963
5964
5965/** Opcode 0x0f 0xa0. */
5966FNIEMOP_DEF(iemOp_push_fs)
5967{
5968 IEMOP_MNEMONIC(push_fs, "push fs");
5969 IEMOP_HLP_MIN_386();
5970 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5971 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
5972}
5973
5974
5975/** Opcode 0x0f 0xa1. */
5976FNIEMOP_DEF(iemOp_pop_fs)
5977{
5978 IEMOP_MNEMONIC(pop_fs, "pop fs");
5979 IEMOP_HLP_MIN_386();
5980 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5981 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
5982}
5983
5984
5985/** Opcode 0x0f 0xa2. */
5986FNIEMOP_DEF(iemOp_cpuid)
5987{
5988 IEMOP_MNEMONIC(cpuid, "cpuid");
5989 IEMOP_HLP_MIN_486(); /* not all 486es. */
5990 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5991 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
5992}
5993
5994
5995/**
5996 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
5997 * iemOp_bts_Ev_Gv.
5998 */
5999FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
6000{
6001 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6002 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
6003
6004 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6005 {
6006 /* register destination. */
6007 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6008 switch (pVCpu->iem.s.enmEffOpSize)
6009 {
6010 case IEMMODE_16BIT:
6011 IEM_MC_BEGIN(3, 0);
6012 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6013 IEM_MC_ARG(uint16_t, u16Src, 1);
6014 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6015
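                /* For a register destination the bit offset wraps modulo the
                   operand size, hence the masking of the source operand below. */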
6016 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6017 IEM_MC_AND_LOCAL_U16(u16Src, 0xf);
6018 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6019 IEM_MC_REF_EFLAGS(pEFlags);
6020 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6021
6022 IEM_MC_ADVANCE_RIP();
6023 IEM_MC_END();
6024 return VINF_SUCCESS;
6025
6026 case IEMMODE_32BIT:
6027 IEM_MC_BEGIN(3, 0);
6028 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6029 IEM_MC_ARG(uint32_t, u32Src, 1);
6030 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6031
6032 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6033 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
6034 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6035 IEM_MC_REF_EFLAGS(pEFlags);
6036 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6037
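                /* Writing a 32-bit GPR zero-extends into the full 64-bit register. */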
6038 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6039 IEM_MC_ADVANCE_RIP();
6040 IEM_MC_END();
6041 return VINF_SUCCESS;
6042
6043 case IEMMODE_64BIT:
6044 IEM_MC_BEGIN(3, 0);
6045 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6046 IEM_MC_ARG(uint64_t, u64Src, 1);
6047 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6048
6049 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6050 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
6051 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6052 IEM_MC_REF_EFLAGS(pEFlags);
6053 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6054
6055 IEM_MC_ADVANCE_RIP();
6056 IEM_MC_END();
6057 return VINF_SUCCESS;
6058
6059 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6060 }
6061 }
6062 else
6063 {
6064 /* memory destination. */
6065
6066 uint32_t fAccess;
6067 if (pImpl->pfnLockedU16)
6068 fAccess = IEM_ACCESS_DATA_RW;
6069 else /* BT */
6070 fAccess = IEM_ACCESS_DATA_R;
6071
6072 /** @todo test negative bit offsets! */
6073 switch (pVCpu->iem.s.enmEffOpSize)
6074 {
6075 case IEMMODE_16BIT:
6076 IEM_MC_BEGIN(3, 2);
6077 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6078 IEM_MC_ARG(uint16_t, u16Src, 1);
6079 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6080 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6081 IEM_MC_LOCAL(int16_t, i16AddrAdj);
6082
6083 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6084 if (pImpl->pfnLockedU16)
6085 IEMOP_HLP_DONE_DECODING();
6086 else
6087 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6088 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
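                /* Split the bit offset: the low four bits select the bit within
                   a word, while the arithmetically shifted remainder is a signed
                   word index that is converted to a byte displacement (SHL by 1)
                   and added to the effective address. */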
6089 IEM_MC_ASSIGN(i16AddrAdj, u16Src);
6090 IEM_MC_AND_ARG_U16(u16Src, 0x0f);
6091 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
6092 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1);
6093 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
6094 IEM_MC_FETCH_EFLAGS(EFlags);
6095
6096 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6097 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6098 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6099 else
6100 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
6101 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
6102
6103 IEM_MC_COMMIT_EFLAGS(EFlags);
6104 IEM_MC_ADVANCE_RIP();
6105 IEM_MC_END();
6106 return VINF_SUCCESS;
6107
6108 case IEMMODE_32BIT:
6109 IEM_MC_BEGIN(3, 2);
6110 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6111 IEM_MC_ARG(uint32_t, u32Src, 1);
6112 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6113 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6114 IEM_MC_LOCAL(int32_t, i32AddrAdj);
6115
6116 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6117 if (pImpl->pfnLockedU16)
6118 IEMOP_HLP_DONE_DECODING();
6119 else
6120 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6121 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6122 IEM_MC_ASSIGN(i32AddrAdj, u32Src);
6123 IEM_MC_AND_ARG_U32(u32Src, 0x1f);
6124 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
6125 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
6126 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
6127 IEM_MC_FETCH_EFLAGS(EFlags);
6128
6129 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6130 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6131 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6132 else
6133 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
6134 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
6135
6136 IEM_MC_COMMIT_EFLAGS(EFlags);
6137 IEM_MC_ADVANCE_RIP();
6138 IEM_MC_END();
6139 return VINF_SUCCESS;
6140
6141 case IEMMODE_64BIT:
6142 IEM_MC_BEGIN(3, 2);
6143 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6144 IEM_MC_ARG(uint64_t, u64Src, 1);
6145 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6146 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6147 IEM_MC_LOCAL(int64_t, i64AddrAdj);
6148
6149 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6150 if (pImpl->pfnLockedU16)
6151 IEMOP_HLP_DONE_DECODING();
6152 else
6153 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6154 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6155 IEM_MC_ASSIGN(i64AddrAdj, u64Src);
6156 IEM_MC_AND_ARG_U64(u64Src, 0x3f);
6157 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
6158 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
6159 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
6160 IEM_MC_FETCH_EFLAGS(EFlags);
6161
6162 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6163 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6164 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6165 else
6166 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
6167 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
6168
6169 IEM_MC_COMMIT_EFLAGS(EFlags);
6170 IEM_MC_ADVANCE_RIP();
6171 IEM_MC_END();
6172 return VINF_SUCCESS;
6173
6174 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6175 }
6176 }
6177}
6178
6179
6180/** Opcode 0x0f 0xa3. */
6181FNIEMOP_DEF(iemOp_bt_Ev_Gv)
6182{
6183 IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
6184 IEMOP_HLP_MIN_386();
6185 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
6186}
6187
6188
6189/**
6190 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
6191 */
6192FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
6193{
6194 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6195 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
6196
6197 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6198 {
6199 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6200 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6201
6202 switch (pVCpu->iem.s.enmEffOpSize)
6203 {
6204 case IEMMODE_16BIT:
6205 IEM_MC_BEGIN(4, 0);
6206 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6207 IEM_MC_ARG(uint16_t, u16Src, 1);
6208 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
6209 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6210
6211 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6212 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6213 IEM_MC_REF_EFLAGS(pEFlags);
6214 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
6215
6216 IEM_MC_ADVANCE_RIP();
6217 IEM_MC_END();
6218 return VINF_SUCCESS;
6219
6220 case IEMMODE_32BIT:
6221 IEM_MC_BEGIN(4, 0);
6222 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6223 IEM_MC_ARG(uint32_t, u32Src, 1);
6224 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
6225 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6226
6227 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6228 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6229 IEM_MC_REF_EFLAGS(pEFlags);
6230 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
6231
6232 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6233 IEM_MC_ADVANCE_RIP();
6234 IEM_MC_END();
6235 return VINF_SUCCESS;
6236
6237 case IEMMODE_64BIT:
6238 IEM_MC_BEGIN(4, 0);
6239 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6240 IEM_MC_ARG(uint64_t, u64Src, 1);
6241 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
6242 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6243
6244 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6245 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6246 IEM_MC_REF_EFLAGS(pEFlags);
6247 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
6248
6249 IEM_MC_ADVANCE_RIP();
6250 IEM_MC_END();
6251 return VINF_SUCCESS;
6252
6253 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6254 }
6255 }
6256 else
6257 {
6258 switch (pVCpu->iem.s.enmEffOpSize)
6259 {
6260 case IEMMODE_16BIT:
6261 IEM_MC_BEGIN(4, 2);
6262 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6263 IEM_MC_ARG(uint16_t, u16Src, 1);
6264 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6265 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6266 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6267
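                /* cbImm=1 below: one immediate byte follows the ModR/M encoding,
                   which the effective address calculation must account for
                   (RIP-relative addressing). */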
6268 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6269 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6270 IEM_MC_ASSIGN(cShiftArg, cShift);
6271 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6272 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6273 IEM_MC_FETCH_EFLAGS(EFlags);
6274 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6275 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
6276
6277 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6278 IEM_MC_COMMIT_EFLAGS(EFlags);
6279 IEM_MC_ADVANCE_RIP();
6280 IEM_MC_END();
6281 return VINF_SUCCESS;
6282
6283 case IEMMODE_32BIT:
6284 IEM_MC_BEGIN(4, 2);
6285 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6286 IEM_MC_ARG(uint32_t, u32Src, 1);
6287 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6288 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6289 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6290
6291 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6292 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6293 IEM_MC_ASSIGN(cShiftArg, cShift);
6294 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6295 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6296 IEM_MC_FETCH_EFLAGS(EFlags);
6297 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6298 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
6299
6300 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6301 IEM_MC_COMMIT_EFLAGS(EFlags);
6302 IEM_MC_ADVANCE_RIP();
6303 IEM_MC_END();
6304 return VINF_SUCCESS;
6305
6306 case IEMMODE_64BIT:
6307 IEM_MC_BEGIN(4, 2);
6308 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6309 IEM_MC_ARG(uint64_t, u64Src, 1);
6310 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6311 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6312 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6313
6314 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6315 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6316 IEM_MC_ASSIGN(cShiftArg, cShift);
6317 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6318 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6319 IEM_MC_FETCH_EFLAGS(EFlags);
6320 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6321 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
6322
6323 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6324 IEM_MC_COMMIT_EFLAGS(EFlags);
6325 IEM_MC_ADVANCE_RIP();
6326 IEM_MC_END();
6327 return VINF_SUCCESS;
6328
6329 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6330 }
6331 }
6332}
6333
6334
6335/**
6336 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
6337 */
6338FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
6339{
6340 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6341 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
6342
6343 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6344 {
6345 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6346
6347 switch (pVCpu->iem.s.enmEffOpSize)
6348 {
6349 case IEMMODE_16BIT:
6350 IEM_MC_BEGIN(4, 0);
6351 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6352 IEM_MC_ARG(uint16_t, u16Src, 1);
6353 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6354 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6355
6356 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6357 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6358 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6359 IEM_MC_REF_EFLAGS(pEFlags);
6360 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
6361
6362 IEM_MC_ADVANCE_RIP();
6363 IEM_MC_END();
6364 return VINF_SUCCESS;
6365
6366 case IEMMODE_32BIT:
6367 IEM_MC_BEGIN(4, 0);
6368 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6369 IEM_MC_ARG(uint32_t, u32Src, 1);
6370 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6371 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6372
6373 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6374 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6375 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6376 IEM_MC_REF_EFLAGS(pEFlags);
6377 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
6378
6379 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6380 IEM_MC_ADVANCE_RIP();
6381 IEM_MC_END();
6382 return VINF_SUCCESS;
6383
6384 case IEMMODE_64BIT:
6385 IEM_MC_BEGIN(4, 0);
6386 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6387 IEM_MC_ARG(uint64_t, u64Src, 1);
6388 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6389 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6390
6391 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6392 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6393 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6394 IEM_MC_REF_EFLAGS(pEFlags);
6395 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
6396
6397 IEM_MC_ADVANCE_RIP();
6398 IEM_MC_END();
6399 return VINF_SUCCESS;
6400
6401 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6402 }
6403 }
6404 else
6405 {
6406 switch (pVCpu->iem.s.enmEffOpSize)
6407 {
6408 case IEMMODE_16BIT:
6409 IEM_MC_BEGIN(4, 2);
6410 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6411 IEM_MC_ARG(uint16_t, u16Src, 1);
6412 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6413 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6414 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6415
6416 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6417 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6418 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6419 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6420 IEM_MC_FETCH_EFLAGS(EFlags);
6421 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6422 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
6423
6424 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6425 IEM_MC_COMMIT_EFLAGS(EFlags);
6426 IEM_MC_ADVANCE_RIP();
6427 IEM_MC_END();
6428 return VINF_SUCCESS;
6429
6430 case IEMMODE_32BIT:
6431 IEM_MC_BEGIN(4, 2);
6432 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6433 IEM_MC_ARG(uint32_t, u32Src, 1);
6434 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6435 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6436 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6437
6438 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6439 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6440 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6441 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6442 IEM_MC_FETCH_EFLAGS(EFlags);
6443 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6444 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
6445
6446 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6447 IEM_MC_COMMIT_EFLAGS(EFlags);
6448 IEM_MC_ADVANCE_RIP();
6449 IEM_MC_END();
6450 return VINF_SUCCESS;
6451
6452 case IEMMODE_64BIT:
6453 IEM_MC_BEGIN(4, 2);
6454 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6455 IEM_MC_ARG(uint64_t, u64Src, 1);
6456 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6457 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6458 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6459
6460 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6461 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6462 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6463 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6464 IEM_MC_FETCH_EFLAGS(EFlags);
6465 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6466 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
6467
6468 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6469 IEM_MC_COMMIT_EFLAGS(EFlags);
6470 IEM_MC_ADVANCE_RIP();
6471 IEM_MC_END();
6472 return VINF_SUCCESS;
6473
6474 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6475 }
6476 }
6477}
6478
6479
6480
6481/** Opcode 0x0f 0xa4. */
6482FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
6483{
6484 IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
6485 IEMOP_HLP_MIN_386();
6486 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shld);
6487}
6488
6489
6490/** Opcode 0x0f 0xa5. */
6491FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
6492{
6493 IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
6494 IEMOP_HLP_MIN_386();
6495 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shld);
6496}
6497
6498
6499/** Opcode 0x0f 0xa8. */
6500FNIEMOP_DEF(iemOp_push_gs)
6501{
6502 IEMOP_MNEMONIC(push_gs, "push gs");
6503 IEMOP_HLP_MIN_386();
6504 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6505 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
6506}
6507
6508
6509/** Opcode 0x0f 0xa9. */
6510FNIEMOP_DEF(iemOp_pop_gs)
6511{
6512 IEMOP_MNEMONIC(pop_gs, "pop gs");
6513 IEMOP_HLP_MIN_386();
6514 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6515 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
6516}
6517
6518
6519/** Opcode 0x0f 0xaa. */
6520FNIEMOP_DEF(iemOp_rsm)
6521{
6522 IEMOP_MNEMONIC(rsm, "rsm");
6523 IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_RSM, SVM_EXIT_RSM, 0, 0);
6524 /** @todo rsm - for the regular case (above handles only the SVM nested-guest
6525 * intercept). */
6526 IEMOP_BITCH_ABOUT_STUB();
6527 return IEMOP_RAISE_INVALID_OPCODE();
6528}
6529
6531
6532
6533/** Opcode 0x0f 0xab. */
6534FNIEMOP_DEF(iemOp_bts_Ev_Gv)
6535{
6536 IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
6537 IEMOP_HLP_MIN_386();
6538 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
6539}
6540
6541
6542/** Opcode 0x0f 0xac. */
6543FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
6544{
6545 IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
6546 IEMOP_HLP_MIN_386();
6547 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shrd);
6548}
6549
6550
6551/** Opcode 0x0f 0xad. */
6552FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
6553{
6554 IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
6555 IEMOP_HLP_MIN_386();
6556 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shrd);
6557}
6558
6559
6560/** Opcode 0x0f 0xae mem/0. */
6561FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
6562{
6563 IEMOP_MNEMONIC(fxsave, "fxsave m512");
6564 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
6565 return IEMOP_RAISE_INVALID_OPCODE();
6566
6567 IEM_MC_BEGIN(3, 1);
6568 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6569 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6570 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6571 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6572 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6573 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
6574 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6575 IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
6576 IEM_MC_END();
6577 return VINF_SUCCESS;
6578}
6579
6580
6581/** Opcode 0x0f 0xae mem/1. */
6582FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
6583{
6584 IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
6585 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
6586 return IEMOP_RAISE_INVALID_OPCODE();
6587
6588 IEM_MC_BEGIN(3, 1);
6589 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6590 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6591 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6592 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6593 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6594 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6595 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6596 IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
6597 IEM_MC_END();
6598 return VINF_SUCCESS;
6599}
6600
6601
6602/**
6603 * @opmaps grp15
6604 * @opcode !11/2
6605 * @oppfx none
6606 * @opcpuid sse
6607 * @opgroup og_sse_mxcsrsm
6608 * @opxcpttype 5
6609 * @optest op1=0 -> mxcsr=0
6610 * @optest op1=0x2083 -> mxcsr=0x2083
6611 * @optest op1=0xfffffffe -> value.xcpt=0xd
6612 * @optest op1=0x2083 cr0|=ts -> value.xcpt=0x7
6613 * @optest op1=0x2083 cr0|=em -> value.xcpt=0x6
6614 * @optest op1=0x2083 cr0|=mp -> mxcsr=0x2083
6615 * @optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
6616 * @optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
6617 * @optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
6618 * @optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
6619 * @optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
6620 */
6621FNIEMOP_DEF_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm)
6622{
6623 IEMOP_MNEMONIC1(M_MEM, LDMXCSR, ldmxcsr, Md_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6624 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
6625 return IEMOP_RAISE_INVALID_OPCODE();
6626
6627 IEM_MC_BEGIN(2, 0);
6628 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6629 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6630 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6631 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6632    IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE(); /* ldmxcsr modifies MXCSR, so actualize for change rather than read. */
6633 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6634 IEM_MC_CALL_CIMPL_2(iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
6635 IEM_MC_END();
6636 return VINF_SUCCESS;
6637}
6638
6639
6640/**
6641 * @opmaps grp15
6642 * @opcode !11/3
6643 * @oppfx none
6644 * @opcpuid sse
6645 * @opgroup og_sse_mxcsrsm
6646 * @opxcpttype 5
6647 * @optest mxcsr=0 -> op1=0
6648 * @optest mxcsr=0x2083 -> op1=0x2083
6649 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
6650 * @optest mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
6651 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086
6652 * @optest mxcsr=0x2087 cr4&~=osfxsr -> value.xcpt=0x6
6653 * @optest mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
6654 * @optest mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
6655 * @optest mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
6656 * @optest mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
6657 */
6658FNIEMOP_DEF_1(iemOp_Grp15_stmxcsr, uint8_t, bRm)
6659{
6660 IEMOP_MNEMONIC1(M_MEM, STMXCSR, stmxcsr, Md_WO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6661 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
6662 return IEMOP_RAISE_INVALID_OPCODE();
6663
6664 IEM_MC_BEGIN(2, 0);
6665 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6666 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6667 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6668 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6669 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
6670 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6671 IEM_MC_CALL_CIMPL_2(iemCImpl_stmxcsr, iEffSeg, GCPtrEff);
6672 IEM_MC_END();
6673 return VINF_SUCCESS;
6674}
6675
6676
6677/**
6678 * @opmaps grp15
6679 * @opcode !11/4
6680 * @oppfx none
6681 * @opcpuid xsave
6682 * @opgroup og_system
6683 * @opxcpttype none
6684 */
6685FNIEMOP_DEF_1(iemOp_Grp15_xsave, uint8_t, bRm)
6686{
6687 IEMOP_MNEMONIC1(M_MEM, XSAVE, xsave, M_RW, DISOPTYPE_HARMLESS, 0);
6688 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
6689 return IEMOP_RAISE_INVALID_OPCODE();
6690
6691 IEM_MC_BEGIN(3, 0);
6692 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6693 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6694 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6695 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6696 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6697 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
6698 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6699 IEM_MC_CALL_CIMPL_3(iemCImpl_xsave, iEffSeg, GCPtrEff, enmEffOpSize);
6700 IEM_MC_END();
6701 return VINF_SUCCESS;
6702}
6703
6704
6705/**
6706 * @opmaps grp15
6707 * @opcode !11/5
6708 * @oppfx none
6709 * @opcpuid xsave
6710 * @opgroup og_system
6711 * @opxcpttype none
6712 */
6713FNIEMOP_DEF_1(iemOp_Grp15_xrstor, uint8_t, bRm)
6714{
6715 IEMOP_MNEMONIC1(M_MEM, XRSTOR, xrstor, M_RO, DISOPTYPE_HARMLESS, 0);
6716 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
6717 return IEMOP_RAISE_INVALID_OPCODE();
6718
6719 IEM_MC_BEGIN(3, 0);
6720 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6721 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6722 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6723 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6724 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6725    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE(); /* xrstor modifies the state, matching fxrstor above. */
6726 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6727 IEM_MC_CALL_CIMPL_3(iemCImpl_xrstor, iEffSeg, GCPtrEff, enmEffOpSize);
6728 IEM_MC_END();
6729 return VINF_SUCCESS;
6730}
6731
6732/** Opcode 0x0f 0xae mem/6. */
6733FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);
6734
6735/**
6736 * @opmaps grp15
6737 * @opcode !11/7
6738 * @oppfx none
6739 * @opcpuid clfsh
6740 * @opgroup og_cachectl
6741 * @optest op1=1 ->
6742 */
6743FNIEMOP_DEF_1(iemOp_Grp15_clflush, uint8_t, bRm)
6744{
6745 IEMOP_MNEMONIC1(M_MEM, CLFLUSH, clflush, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6746 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlush)
6747 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
6748
6749 IEM_MC_BEGIN(2, 0);
6750 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6751 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6752 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6753 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6754 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6755 IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
6756 IEM_MC_END();
6757 return VINF_SUCCESS;
6758}
6759
6760/**
6761 * @opmaps grp15
6762 * @opcode !11/7
6763 * @oppfx 0x66
6764 * @opcpuid clflushopt
6765 * @opgroup og_cachectl
6766 * @optest op1=1 ->
6767 */
6768FNIEMOP_DEF_1(iemOp_Grp15_clflushopt, uint8_t, bRm)
6769{
6770 IEMOP_MNEMONIC1(M_MEM, CLFLUSHOPT, clflushopt, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6771 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlushOpt)
6772 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
6773
6774 IEM_MC_BEGIN(2, 0);
6775 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6776 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6777 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6778 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6779 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6780 IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
6781 IEM_MC_END();
6782 return VINF_SUCCESS;
6783}
6784
6785
6786/** Opcode 0x0f 0xae 11b/5. */
6787FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
6788{
6789 RT_NOREF_PV(bRm);
6790 IEMOP_MNEMONIC(lfence, "lfence");
6791 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6792 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6793 return IEMOP_RAISE_INVALID_OPCODE();
6794
6795 IEM_MC_BEGIN(0, 0);
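    /* If the host can't execute lfence itself, fall back to an alternative
       memory fence; the same pattern is used for mfence/sfence below. */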
6796 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
6797 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
6798 else
6799 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
6800 IEM_MC_ADVANCE_RIP();
6801 IEM_MC_END();
6802 return VINF_SUCCESS;
6803}
6804
6805
6806/** Opcode 0x0f 0xae 11b/6. */
6807FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
6808{
6809 RT_NOREF_PV(bRm);
6810 IEMOP_MNEMONIC(mfence, "mfence");
6811 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6812 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6813 return IEMOP_RAISE_INVALID_OPCODE();
6814
6815 IEM_MC_BEGIN(0, 0);
6816 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
6817 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
6818 else
6819 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
6820 IEM_MC_ADVANCE_RIP();
6821 IEM_MC_END();
6822 return VINF_SUCCESS;
6823}
6824
6825
6826/** Opcode 0x0f 0xae 11b/7. */
6827FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
6828{
6829 RT_NOREF_PV(bRm);
6830 IEMOP_MNEMONIC(sfence, "sfence");
6831 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6832 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6833 return IEMOP_RAISE_INVALID_OPCODE();
6834
6835 IEM_MC_BEGIN(0, 0);
6836 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
6837 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
6838 else
6839 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
6840 IEM_MC_ADVANCE_RIP();
6841 IEM_MC_END();
6842 return VINF_SUCCESS;
6843}
6844
6845
6846/** Opcode 0xf3 0x0f 0xae 11b/0. */
6847FNIEMOP_UD_STUB_1(iemOp_Grp15_rdfsbase, uint8_t, bRm);
6848
6849/** Opcode 0xf3 0x0f 0xae 11b/1. */
6850FNIEMOP_UD_STUB_1(iemOp_Grp15_rdgsbase, uint8_t, bRm);
6851
6852/** Opcode 0xf3 0x0f 0xae 11b/2. */
6853FNIEMOP_UD_STUB_1(iemOp_Grp15_wrfsbase, uint8_t, bRm);
6854
6855/** Opcode 0xf3 0x0f 0xae 11b/3. */
6856FNIEMOP_UD_STUB_1(iemOp_Grp15_wrgsbase, uint8_t, bRm);
6857
6858
6859/**
6860 * Group 15 jump table for register variant.
6861 */
6862IEM_STATIC const PFNIEMOPRM g_apfnGroup15RegReg[] =
6863{ /* pfx: none, 066h, 0f3h, 0f2h */
6864 /* /0 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdfsbase, iemOp_InvalidWithRM,
6865 /* /1 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdgsbase, iemOp_InvalidWithRM,
6866 /* /2 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrfsbase, iemOp_InvalidWithRM,
6867 /* /3 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrgsbase, iemOp_InvalidWithRM,
6868 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
6869 /* /5 */ iemOp_Grp15_lfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6870 /* /6 */ iemOp_Grp15_mfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6871 /* /7 */ iemOp_Grp15_sfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6872};
6873AssertCompile(RT_ELEMENTS(g_apfnGroup15RegReg) == 8*4);
6874
6875
6876/**
6877 * Group 15 jump table for memory variant.
6878 */
6879IEM_STATIC const PFNIEMOPRM g_apfnGroup15MemReg[] =
6880{ /* pfx: none, 066h, 0f3h, 0f2h */
6881 /* /0 */ iemOp_Grp15_fxsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6882 /* /1 */ iemOp_Grp15_fxrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6883 /* /2 */ iemOp_Grp15_ldmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6884 /* /3 */ iemOp_Grp15_stmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6885 /* /4 */ iemOp_Grp15_xsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6886 /* /5 */ iemOp_Grp15_xrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6887 /* /6 */ iemOp_Grp15_xsaveopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6888 /* /7 */ iemOp_Grp15_clflush, iemOp_Grp15_clflushopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6889};
6890AssertCompile(RT_ELEMENTS(g_apfnGroup15MemReg) == 8*4);
6891
6892
6893/** Opcode 0x0f 0xae. */
6894FNIEMOP_DEF(iemOp_Grp15)
6895{
6896 IEMOP_HLP_MIN_586(); /* Not entirely accurate nor needed, but useful for debugging 286 code. */
6897 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
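    /* Index the dispatch tables by /reg * 4 plus the active SIMD prefix
       (none, 0x66, 0xf3, 0xf2), matching the table layout above. */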
6898 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6899 /* register, register */
6900 return FNIEMOP_CALL_1(g_apfnGroup15RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
6901 + pVCpu->iem.s.idxPrefix], bRm);
6902 /* memory, register */
6903 return FNIEMOP_CALL_1(g_apfnGroup15MemReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
6904 + pVCpu->iem.s.idxPrefix], bRm);
6905}
6906
6907
6908/** Opcode 0x0f 0xaf. */
6909FNIEMOP_DEF(iemOp_imul_Gv_Ev)
6910{
6911 IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
6912 IEMOP_HLP_MIN_386();
6913 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
6914 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_imul_two);
6915}
6916
6917
6918/** Opcode 0x0f 0xb0. */
6919FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
6920{
6921 IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
6922 IEMOP_HLP_MIN_486();
6923 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6924
6925 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6926 {
6927 IEMOP_HLP_DONE_DECODING();
6928 IEM_MC_BEGIN(4, 0);
6929 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6930 IEM_MC_ARG(uint8_t *, pu8Al, 1);
6931 IEM_MC_ARG(uint8_t, u8Src, 2);
6932 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6933
6934 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6935 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6936 IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
6937 IEM_MC_REF_EFLAGS(pEFlags);
6938 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6939 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
6940 else
6941 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
6942
6943 IEM_MC_ADVANCE_RIP();
6944 IEM_MC_END();
6945 }
6946 else
6947 {
6948 IEM_MC_BEGIN(4, 3);
6949 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6950 IEM_MC_ARG(uint8_t *, pu8Al, 1);
6951 IEM_MC_ARG(uint8_t, u8Src, 2);
6952 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6953 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6954 IEM_MC_LOCAL(uint8_t, u8Al);
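        /* AL is handled via a local copy so that the comparand the helper
           updates is only written back to the register after the memory
           operand has been committed. */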
6955
6956 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6957 IEMOP_HLP_DONE_DECODING();
6958 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6959 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6960 IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
6961 IEM_MC_FETCH_EFLAGS(EFlags);
6962 IEM_MC_REF_LOCAL(pu8Al, u8Al);
6963 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6964 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
6965 else
6966 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
6967
6968 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
6969 IEM_MC_COMMIT_EFLAGS(EFlags);
6970 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al);
6971 IEM_MC_ADVANCE_RIP();
6972 IEM_MC_END();
6973 }
6974 return VINF_SUCCESS;
6975}
6976
6977/** Opcode 0x0f 0xb1. */
6978FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
6979{
6980 IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
6981 IEMOP_HLP_MIN_486();
6982 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6983
6984 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6985 {
6986 IEMOP_HLP_DONE_DECODING();
6987 switch (pVCpu->iem.s.enmEffOpSize)
6988 {
6989 case IEMMODE_16BIT:
6990 IEM_MC_BEGIN(4, 0);
6991 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6992 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
6993 IEM_MC_ARG(uint16_t, u16Src, 2);
6994 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6995
6996 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6997 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6998 IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
6999 IEM_MC_REF_EFLAGS(pEFlags);
7000 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7001 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
7002 else
7003 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
7004
7005 IEM_MC_ADVANCE_RIP();
7006 IEM_MC_END();
7007 return VINF_SUCCESS;
7008
7009 case IEMMODE_32BIT:
7010 IEM_MC_BEGIN(4, 0);
7011 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7012 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
7013 IEM_MC_ARG(uint32_t, u32Src, 2);
7014 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7015
7016 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7017 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7018 IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
7019 IEM_MC_REF_EFLAGS(pEFlags);
7020 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7021 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
7022 else
7023 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
7024
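                /* Both high halves are cleared here: 32-bit register writes
                   zero-extend, and this implementation treats both EAX and the
                   destination as written. */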
7025 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
7026 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7027 IEM_MC_ADVANCE_RIP();
7028 IEM_MC_END();
7029 return VINF_SUCCESS;
7030
7031 case IEMMODE_64BIT:
7032 IEM_MC_BEGIN(4, 0);
7033 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7034 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
7035#ifdef RT_ARCH_X86
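                /* On 32-bit hosts the 64-bit source can't be passed by value
                   to the assembly helper, so a pointer is used instead. */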
7036 IEM_MC_ARG(uint64_t *, pu64Src, 2);
7037#else
7038 IEM_MC_ARG(uint64_t, u64Src, 2);
7039#endif
7040 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7041
7042 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7043 IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
7044 IEM_MC_REF_EFLAGS(pEFlags);
7045#ifdef RT_ARCH_X86
7046 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7047 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7048 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
7049 else
7050 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
7051#else
7052 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7053 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7054 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
7055 else
7056 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
7057#endif
7058
7059 IEM_MC_ADVANCE_RIP();
7060 IEM_MC_END();
7061 return VINF_SUCCESS;
7062
7063 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7064 }
7065 }
7066 else
7067 {
7068 switch (pVCpu->iem.s.enmEffOpSize)
7069 {
7070 case IEMMODE_16BIT:
7071 IEM_MC_BEGIN(4, 3);
7072 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7073 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
7074 IEM_MC_ARG(uint16_t, u16Src, 2);
7075 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7076 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7077 IEM_MC_LOCAL(uint16_t, u16Ax);
7078
7079 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7080 IEMOP_HLP_DONE_DECODING();
7081 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7082 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7083 IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
7084 IEM_MC_FETCH_EFLAGS(EFlags);
7085 IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
7086 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7087 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
7088 else
7089 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
7090
7091 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
7092 IEM_MC_COMMIT_EFLAGS(EFlags);
7093 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
7094 IEM_MC_ADVANCE_RIP();
7095 IEM_MC_END();
7096 return VINF_SUCCESS;
7097
7098 case IEMMODE_32BIT:
7099 IEM_MC_BEGIN(4, 3);
7100 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7101 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
7102 IEM_MC_ARG(uint32_t, u32Src, 2);
7103 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7104 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7105 IEM_MC_LOCAL(uint32_t, u32Eax);
7106
7107 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7108 IEMOP_HLP_DONE_DECODING();
7109 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7110 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7111 IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
7112 IEM_MC_FETCH_EFLAGS(EFlags);
7113 IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
7114 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7115 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
7116 else
7117 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
7118
7119 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
7120 IEM_MC_COMMIT_EFLAGS(EFlags);
7121 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
7122 IEM_MC_ADVANCE_RIP();
7123 IEM_MC_END();
7124 return VINF_SUCCESS;
7125
7126 case IEMMODE_64BIT:
7127 IEM_MC_BEGIN(4, 3);
7128 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7129 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
7130#ifdef RT_ARCH_X86
7131 IEM_MC_ARG(uint64_t *, pu64Src, 2);
7132#else
7133 IEM_MC_ARG(uint64_t, u64Src, 2);
7134#endif
7135 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7136 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7137 IEM_MC_LOCAL(uint64_t, u64Rax);
7138
7139 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7140 IEMOP_HLP_DONE_DECODING();
7141 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7142 IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
7143 IEM_MC_FETCH_EFLAGS(EFlags);
7144 IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
7145#ifdef RT_ARCH_X86
7146 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7147 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7148 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
7149 else
7150 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
7151#else
7152 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7153 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7154 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
7155 else
7156 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
7157#endif
7158
7159 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
7160 IEM_MC_COMMIT_EFLAGS(EFlags);
7161 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
7162 IEM_MC_ADVANCE_RIP();
7163 IEM_MC_END();
7164 return VINF_SUCCESS;
7165
7166 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7167 }
7168 }
7169}
7170
7171
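/*
 * Common worker for lss/lfs/lgs (0x0f 0xb2/0xb4/0xb5): loads a far pointer
 * Mp from memory into SReg:GReg. Per the Intel/AMD manuals the memory
 * operand is laid out offset-first:
 *
 *     [GCPtrEff + 0]       offset (2, 4 or 8 bytes, by operand size)
 *     [GCPtrEff + opsize]  16-bit selector
 *
 * which is why the selector fetches below use a displacement equal to the
 * operand size.
 */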
7172FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
7173{
7174 Assert((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)); /* Caller checks this */
7175 uint8_t const iGReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
7176
7177 switch (pVCpu->iem.s.enmEffOpSize)
7178 {
7179 case IEMMODE_16BIT:
7180 IEM_MC_BEGIN(5, 1);
7181 IEM_MC_ARG(uint16_t, uSel, 0);
7182 IEM_MC_ARG(uint16_t, offSeg, 1);
7183 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
7184 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
7185 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
7186 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
7187 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7188 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7189 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
7190 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 2);
7191 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
7192 IEM_MC_END();
7193 return VINF_SUCCESS;
7194
7195 case IEMMODE_32BIT:
7196 IEM_MC_BEGIN(5, 1);
7197 IEM_MC_ARG(uint16_t, uSel, 0);
7198 IEM_MC_ARG(uint32_t, offSeg, 1);
7199 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
7200 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
7201 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
7202 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
7203 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7204 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7205 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
7206 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 4);
7207 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
7208 IEM_MC_END();
7209 return VINF_SUCCESS;
7210
7211 case IEMMODE_64BIT:
7212 IEM_MC_BEGIN(5, 1);
7213 IEM_MC_ARG(uint16_t, uSel, 0);
7214 IEM_MC_ARG(uint64_t, offSeg, 1);
7215 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
7216 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
7217 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
7218 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
7219 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7220 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7221 if (IEM_IS_GUEST_CPU_AMD(pVCpu)) /** @todo testcase: rev 3.15 of the amd manuals claims it only loads a 32-bit greg. */
7222 IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
7223 else
7224 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
7225 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 8);
7226 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
7227 IEM_MC_END();
7228 return VINF_SUCCESS;
7229
7230 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7231 }
7232}
7233
7234
7235/** Opcode 0x0f 0xb2. */
7236FNIEMOP_DEF(iemOp_lss_Gv_Mp)
7237{
7238 IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
7239 IEMOP_HLP_MIN_386();
7240 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7241 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7242 return IEMOP_RAISE_INVALID_OPCODE();
7243 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
7244}
7245
7246
7247/** Opcode 0x0f 0xb3. */
7248FNIEMOP_DEF(iemOp_btr_Ev_Gv)
7249{
7250 IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
7251 IEMOP_HLP_MIN_386();
7252 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
7253}
7254
7255
7256/** Opcode 0x0f 0xb4. */
7257FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
7258{
7259 IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
7260 IEMOP_HLP_MIN_386();
7261 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7262 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7263 return IEMOP_RAISE_INVALID_OPCODE();
7264 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
7265}
7266
7267
7268/** Opcode 0x0f 0xb5. */
7269FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
7270{
7271 IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
7272 IEMOP_HLP_MIN_386();
7273 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7274 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7275 return IEMOP_RAISE_INVALID_OPCODE();
7276 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
7277}
7278
7279
7280/** Opcode 0x0f 0xb6. */
7281FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
7282{
7283 IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
7284 IEMOP_HLP_MIN_386();
7285
7286 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7287
7288 /*
7289 * If rm is denoting a register, no more instruction bytes.
7290 */
7291 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7292 {
7293 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7294 switch (pVCpu->iem.s.enmEffOpSize)
7295 {
7296 case IEMMODE_16BIT:
7297 IEM_MC_BEGIN(0, 1);
7298 IEM_MC_LOCAL(uint16_t, u16Value);
7299 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7300 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
7301 IEM_MC_ADVANCE_RIP();
7302 IEM_MC_END();
7303 return VINF_SUCCESS;
7304
7305 case IEMMODE_32BIT:
7306 IEM_MC_BEGIN(0, 1);
7307 IEM_MC_LOCAL(uint32_t, u32Value);
7308 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7309 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7310 IEM_MC_ADVANCE_RIP();
7311 IEM_MC_END();
7312 return VINF_SUCCESS;
7313
7314 case IEMMODE_64BIT:
7315 IEM_MC_BEGIN(0, 1);
7316 IEM_MC_LOCAL(uint64_t, u64Value);
7317 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7318 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7319 IEM_MC_ADVANCE_RIP();
7320 IEM_MC_END();
7321 return VINF_SUCCESS;
7322
7323 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7324 }
7325 }
7326 else
7327 {
7328 /*
7329 * We're loading a register from memory.
7330 */
7331 switch (pVCpu->iem.s.enmEffOpSize)
7332 {
7333 case IEMMODE_16BIT:
7334 IEM_MC_BEGIN(0, 2);
7335 IEM_MC_LOCAL(uint16_t, u16Value);
7336 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7337 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7338 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7339 IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7340 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
7341 IEM_MC_ADVANCE_RIP();
7342 IEM_MC_END();
7343 return VINF_SUCCESS;
7344
7345 case IEMMODE_32BIT:
7346 IEM_MC_BEGIN(0, 2);
7347 IEM_MC_LOCAL(uint32_t, u32Value);
7348 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7349 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7350 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7351 IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7352 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7353 IEM_MC_ADVANCE_RIP();
7354 IEM_MC_END();
7355 return VINF_SUCCESS;
7356
7357 case IEMMODE_64BIT:
7358 IEM_MC_BEGIN(0, 2);
7359 IEM_MC_LOCAL(uint64_t, u64Value);
7360 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7361 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7362 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7363 IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7364 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7365 IEM_MC_ADVANCE_RIP();
7366 IEM_MC_END();
7367 return VINF_SUCCESS;
7368
7369 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7370 }
7371 }
7372}
7373
7374
7375/** Opcode 0x0f 0xb7. */
7376FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
7377{
7378 IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
7379 IEMOP_HLP_MIN_386();
7380
7381 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7382
7383 /** @todo Not entirely sure how the operand size prefix is handled here,
7384 * assuming that it will be ignored. Would be nice to have a few
7385 * tests for this. */
7386 /*
7387 * If rm is denoting a register, no more instruction bytes.
7388 */
7389 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7390 {
7391 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7392 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
7393 {
7394 IEM_MC_BEGIN(0, 1);
7395 IEM_MC_LOCAL(uint32_t, u32Value);
7396 IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7397 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7398 IEM_MC_ADVANCE_RIP();
7399 IEM_MC_END();
7400 }
7401 else
7402 {
7403 IEM_MC_BEGIN(0, 1);
7404 IEM_MC_LOCAL(uint64_t, u64Value);
7405 IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7406 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7407 IEM_MC_ADVANCE_RIP();
7408 IEM_MC_END();
7409 }
7410 }
7411 else
7412 {
7413 /*
7414 * We're loading a register from memory.
7415 */
7416 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
7417 {
7418 IEM_MC_BEGIN(0, 2);
7419 IEM_MC_LOCAL(uint32_t, u32Value);
7420 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7421 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7422 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7423 IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7424 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7425 IEM_MC_ADVANCE_RIP();
7426 IEM_MC_END();
7427 }
7428 else
7429 {
7430 IEM_MC_BEGIN(0, 2);
7431 IEM_MC_LOCAL(uint64_t, u64Value);
7432 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7433 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7434 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7435 IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7436 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7437 IEM_MC_ADVANCE_RIP();
7438 IEM_MC_END();
7439 }
7440 }
7441 return VINF_SUCCESS;
7442}
7443
7444
7445/** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF) */
7446FNIEMOP_UD_STUB(iemOp_jmpe);
7447/** Opcode 0xf3 0x0f 0xb8 - POPCNT Gv, Ev */
7448FNIEMOP_STUB(iemOp_popcnt_Gv_Ev);
7449
7450
7451/**
7452 * @opcode 0xb9
7453 * @opinvalid intel-modrm
7454 * @optest ->
7455 */
7456FNIEMOP_DEF(iemOp_Grp10)
7457{
7458 /*
7459 * AMD does not decode beyond the 0xb9 byte, whereas Intel decodes the modr/m byte
7460 * too. See bs3-cpu-decoder-1.c32. So, we can forward to iemOp_InvalidNeedRM.
7461 */
7462 Log(("iemOp_Grp10 aka UD1 -> #UD\n"));
7463 IEMOP_MNEMONIC2EX(ud1, "ud1", RM, UD1, ud1, Gb, Eb, DISOPTYPE_INVALID, IEMOPHINT_IGNORES_OP_SIZES); /* just picked Gb,Eb here. */
7464 return FNIEMOP_CALL(iemOp_InvalidNeedRM);
7465}
7466
7467
7468/** Opcode 0x0f 0xba. */
7469FNIEMOP_DEF(iemOp_Grp8)
7470{
7471 IEMOP_HLP_MIN_386();
7472 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7473 PCIEMOPBINSIZES pImpl;
7474 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
7475 {
7476 case 0: case 1: case 2: case 3:
7477 /* Both AMD and Intel want full modr/m decoding and imm8. */
7478 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeedImm8, bRm);
7479 case 4: pImpl = &g_iemAImpl_bt; IEMOP_MNEMONIC(bt_Ev_Ib, "bt Ev,Ib"); break;
7480 case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib"); break;
7481 case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib"); break;
7482 case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib"); break;
7483 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7484 }
7485 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
7486
7487 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7488 {
7489 /* register destination. */
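/* Per the manuals the imm8 bit offset is taken modulo the operand width
   for the register forms, hence the 0x0f/0x1f/0x3f masking of u8Bit below. */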
7490 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
7491 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7492
7493 switch (pVCpu->iem.s.enmEffOpSize)
7494 {
7495 case IEMMODE_16BIT:
7496 IEM_MC_BEGIN(3, 0);
7497 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7498 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u8Bit & 0x0f, 1);
7499 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7500
7501 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7502 IEM_MC_REF_EFLAGS(pEFlags);
7503 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
7504
7505 IEM_MC_ADVANCE_RIP();
7506 IEM_MC_END();
7507 return VINF_SUCCESS;
7508
7509 case IEMMODE_32BIT:
7510 IEM_MC_BEGIN(3, 0);
7511 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7512 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u8Bit & 0x1f, 1);
7513 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7514
7515 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7516 IEM_MC_REF_EFLAGS(pEFlags);
7517 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
7518
7519 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7520 IEM_MC_ADVANCE_RIP();
7521 IEM_MC_END();
7522 return VINF_SUCCESS;
7523
7524 case IEMMODE_64BIT:
7525 IEM_MC_BEGIN(3, 0);
7526 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7527 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u8Bit & 0x3f, 1);
7528 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7529
7530 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7531 IEM_MC_REF_EFLAGS(pEFlags);
7532 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
7533
7534 IEM_MC_ADVANCE_RIP();
7535 IEM_MC_END();
7536 return VINF_SUCCESS;
7537
7538 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7539 }
7540 }
7541 else
7542 {
7543 /* memory destination. */
7544
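/* The imm8 bit offset is masked to the operand width for memory operands
   too (per the manuals), so no effective address adjustment is needed here.
   Note that IEM_MC_CALC_RM_EFF_ADDR is told about the one immediate byte
   still to be fetched, which presumably matters for RIP-relative addressing
   in 64-bit mode. */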
7545 uint32_t fAccess;
7546 if (pImpl->pfnLockedU16)
7547 fAccess = IEM_ACCESS_DATA_RW;
7548 else /* BT */
7549 fAccess = IEM_ACCESS_DATA_R;
7550
7551 /** @todo test negative bit offsets! */
7552 switch (pVCpu->iem.s.enmEffOpSize)
7553 {
7554 case IEMMODE_16BIT:
7555 IEM_MC_BEGIN(3, 1);
7556 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7557 IEM_MC_ARG(uint16_t, u16Src, 1);
7558 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
7559 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7560
7561 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7562 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
7563 IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
7564 if (pImpl->pfnLockedU16)
7565 IEMOP_HLP_DONE_DECODING();
7566 else
7567 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7568 IEM_MC_FETCH_EFLAGS(EFlags);
7569 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7570 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7571 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
7572 else
7573 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
7574 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
7575
7576 IEM_MC_COMMIT_EFLAGS(EFlags);
7577 IEM_MC_ADVANCE_RIP();
7578 IEM_MC_END();
7579 return VINF_SUCCESS;
7580
7581 case IEMMODE_32BIT:
7582 IEM_MC_BEGIN(3, 1);
7583 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7584 IEM_MC_ARG(uint32_t, u32Src, 1);
7585 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
7586 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7587
7588 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7589 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
7590 IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
7591 if (pImpl->pfnLockedU16)
7592 IEMOP_HLP_DONE_DECODING();
7593 else
7594 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7595 IEM_MC_FETCH_EFLAGS(EFlags);
7596 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7597 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7598 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
7599 else
7600 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
7601 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
7602
7603 IEM_MC_COMMIT_EFLAGS(EFlags);
7604 IEM_MC_ADVANCE_RIP();
7605 IEM_MC_END();
7606 return VINF_SUCCESS;
7607
7608 case IEMMODE_64BIT:
7609 IEM_MC_BEGIN(3, 1);
7610 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7611 IEM_MC_ARG(uint64_t, u64Src, 1);
7612 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
7613 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7614
7615 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7616 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
7617 IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
7618 if (pImpl->pfnLockedU16)
7619 IEMOP_HLP_DONE_DECODING();
7620 else
7621 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7622 IEM_MC_FETCH_EFLAGS(EFlags);
7623 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7624 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7625 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
7626 else
7627 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
7628 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
7629
7630 IEM_MC_COMMIT_EFLAGS(EFlags);
7631 IEM_MC_ADVANCE_RIP();
7632 IEM_MC_END();
7633 return VINF_SUCCESS;
7634
7635 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7636 }
7637 }
7638}
7639
7640
7641/** Opcode 0x0f 0xbb. */
7642FNIEMOP_DEF(iemOp_btc_Ev_Gv)
7643{
7644 IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
7645 IEMOP_HLP_MIN_386();
7646 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
7647}
7648
7649
7650/** Opcode 0x0f 0xbc. */
7651FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
7652{
7653 IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
7654 IEMOP_HLP_MIN_386();
7655 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
7656 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsf);
7657}
7658
7659
7660/** Opcode 0xf3 0x0f 0xbc - TZCNT Gv, Ev */
7661FNIEMOP_STUB(iemOp_tzcnt_Gv_Ev);
7662
7663
7664/** Opcode 0x0f 0xbd. */
7665FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
7666{
7667 IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
7668 IEMOP_HLP_MIN_386();
7669 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
7670 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsr);
7671}
7672
7673
7674/** Opcode 0xf3 0x0f 0xbd - LZCNT Gv, Ev */
7675FNIEMOP_STUB(iemOp_lzcnt_Gv_Ev);
7676
7677
7678/** Opcode 0x0f 0xbe. */
7679FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
7680{
7681 IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
7682 IEMOP_HLP_MIN_386();
7683
7684 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7685
7686 /*
7687 * If rm is denoting a register, no more instruction bytes.
7688 */
7689 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7690 {
7691 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7692 switch (pVCpu->iem.s.enmEffOpSize)
7693 {
7694 case IEMMODE_16BIT:
7695 IEM_MC_BEGIN(0, 1);
7696 IEM_MC_LOCAL(uint16_t, u16Value);
7697 IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7698 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
7699 IEM_MC_ADVANCE_RIP();
7700 IEM_MC_END();
7701 return VINF_SUCCESS;
7702
7703 case IEMMODE_32BIT:
7704 IEM_MC_BEGIN(0, 1);
7705 IEM_MC_LOCAL(uint32_t, u32Value);
7706 IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7707 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7708 IEM_MC_ADVANCE_RIP();
7709 IEM_MC_END();
7710 return VINF_SUCCESS;
7711
7712 case IEMMODE_64BIT:
7713 IEM_MC_BEGIN(0, 1);
7714 IEM_MC_LOCAL(uint64_t, u64Value);
7715 IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7716 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7717 IEM_MC_ADVANCE_RIP();
7718 IEM_MC_END();
7719 return VINF_SUCCESS;
7720
7721 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7722 }
7723 }
7724 else
7725 {
7726 /*
7727 * We're loading a register from memory.
7728 */
7729 switch (pVCpu->iem.s.enmEffOpSize)
7730 {
7731 case IEMMODE_16BIT:
7732 IEM_MC_BEGIN(0, 2);
7733 IEM_MC_LOCAL(uint16_t, u16Value);
7734 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7735 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7736 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7737 IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7738 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
7739 IEM_MC_ADVANCE_RIP();
7740 IEM_MC_END();
7741 return VINF_SUCCESS;
7742
7743 case IEMMODE_32BIT:
7744 IEM_MC_BEGIN(0, 2);
7745 IEM_MC_LOCAL(uint32_t, u32Value);
7746 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7747 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7748 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7749 IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7750 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7751 IEM_MC_ADVANCE_RIP();
7752 IEM_MC_END();
7753 return VINF_SUCCESS;
7754
7755 case IEMMODE_64BIT:
7756 IEM_MC_BEGIN(0, 2);
7757 IEM_MC_LOCAL(uint64_t, u64Value);
7758 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7759 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7760 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7761 IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7762 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7763 IEM_MC_ADVANCE_RIP();
7764 IEM_MC_END();
7765 return VINF_SUCCESS;
7766
7767 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7768 }
7769 }
7770}
7771
7772
7773/** Opcode 0x0f 0xbf. */
7774FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
7775{
7776 IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
7777 IEMOP_HLP_MIN_386();
7778
7779 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7780
7781 /** @todo Not entirely sure how the operand size prefix is handled here,
7782 * assuming that it will be ignored. Would be nice to have a few
7783 * tests for this. */
7784 /*
7785 * If rm is denoting a register, no more instruction bytes.
7786 */
7787 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7788 {
7789 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7790 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
7791 {
7792 IEM_MC_BEGIN(0, 1);
7793 IEM_MC_LOCAL(uint32_t, u32Value);
7794 IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7795 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7796 IEM_MC_ADVANCE_RIP();
7797 IEM_MC_END();
7798 }
7799 else
7800 {
7801 IEM_MC_BEGIN(0, 1);
7802 IEM_MC_LOCAL(uint64_t, u64Value);
7803 IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7804 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7805 IEM_MC_ADVANCE_RIP();
7806 IEM_MC_END();
7807 }
7808 }
7809 else
7810 {
7811 /*
7812 * We're loading a register from memory.
7813 */
7814 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
7815 {
7816 IEM_MC_BEGIN(0, 2);
7817 IEM_MC_LOCAL(uint32_t, u32Value);
7818 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7819 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7820 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7821 IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7822 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7823 IEM_MC_ADVANCE_RIP();
7824 IEM_MC_END();
7825 }
7826 else
7827 {
7828 IEM_MC_BEGIN(0, 2);
7829 IEM_MC_LOCAL(uint64_t, u64Value);
7830 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7831 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7832 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7833 IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7834 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7835 IEM_MC_ADVANCE_RIP();
7836 IEM_MC_END();
7837 }
7838 }
7839 return VINF_SUCCESS;
7840}
7841
7842
7843/** Opcode 0x0f 0xc0. */
7844FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
7845{
7846 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7847 IEMOP_HLP_MIN_486();
7848 IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");
7849
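/* Guest-visible XADD semantics: tmp = dst; dst += src; src(reg) = tmp;
   EFLAGS are set as for ADD. */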
7850 /*
7851 * If rm is denoting a register, no more instruction bytes.
7852 */
7853 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7854 {
7855 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7856
7857 IEM_MC_BEGIN(3, 0);
7858 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7859 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
7860 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7861
7862 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7863 IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7864 IEM_MC_REF_EFLAGS(pEFlags);
7865 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
7866
7867 IEM_MC_ADVANCE_RIP();
7868 IEM_MC_END();
7869 }
7870 else
7871 {
7872 /*
7873 * We're accessing memory.
7874 */
7875 IEM_MC_BEGIN(3, 3);
7876 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7877 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
7878 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7879 IEM_MC_LOCAL(uint8_t, u8RegCopy);
7880 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7881
7882 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7883 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7884 IEM_MC_FETCH_GREG_U8(u8RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7885 IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
7886 IEM_MC_FETCH_EFLAGS(EFlags);
7887 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7888 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
7889 else
7890 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);
7891
7892 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
7893 IEM_MC_COMMIT_EFLAGS(EFlags);
7894 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8RegCopy);
7895 IEM_MC_ADVANCE_RIP();
7896 IEM_MC_END();
7897 return VINF_SUCCESS;
7898 }
7899 return VINF_SUCCESS;
7900}
7901
7902
7903/** Opcode 0x0f 0xc1. */
7904FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
7905{
7906 IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
7907 IEMOP_HLP_MIN_486();
7908 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7909
7910 /*
7911 * If rm is denoting a register, no more instruction bytes.
7912 */
7913 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7914 {
7915 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7916
7917 switch (pVCpu->iem.s.enmEffOpSize)
7918 {
7919 case IEMMODE_16BIT:
7920 IEM_MC_BEGIN(3, 0);
7921 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7922 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
7923 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7924
7925 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7926 IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7927 IEM_MC_REF_EFLAGS(pEFlags);
7928 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
7929
7930 IEM_MC_ADVANCE_RIP();
7931 IEM_MC_END();
7932 return VINF_SUCCESS;
7933
7934 case IEMMODE_32BIT:
7935 IEM_MC_BEGIN(3, 0);
7936 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7937 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
7938 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7939
7940 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7941 IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7942 IEM_MC_REF_EFLAGS(pEFlags);
7943 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
7944
7945 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7946 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
7947 IEM_MC_ADVANCE_RIP();
7948 IEM_MC_END();
7949 return VINF_SUCCESS;
7950
7951 case IEMMODE_64BIT:
7952 IEM_MC_BEGIN(3, 0);
7953 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7954 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
7955 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7956
7957 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7958 IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7959 IEM_MC_REF_EFLAGS(pEFlags);
7960 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
7961
7962 IEM_MC_ADVANCE_RIP();
7963 IEM_MC_END();
7964 return VINF_SUCCESS;
7965
7966 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7967 }
7968 }
7969 else
7970 {
7971 /*
7972 * We're accessing memory.
7973 */
7974 switch (pVCpu->iem.s.enmEffOpSize)
7975 {
7976 case IEMMODE_16BIT:
7977 IEM_MC_BEGIN(3, 3);
7978 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7979 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
7980 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7981 IEM_MC_LOCAL(uint16_t, u16RegCopy);
7982 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7983
7984 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7985 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7986 IEM_MC_FETCH_GREG_U16(u16RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7987 IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
7988 IEM_MC_FETCH_EFLAGS(EFlags);
7989 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7990 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
7991 else
7992 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);
7993
7994 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
7995 IEM_MC_COMMIT_EFLAGS(EFlags);
7996 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16RegCopy);
7997 IEM_MC_ADVANCE_RIP();
7998 IEM_MC_END();
7999 return VINF_SUCCESS;
8000
8001 case IEMMODE_32BIT:
8002 IEM_MC_BEGIN(3, 3);
8003 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8004 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
8005 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8006 IEM_MC_LOCAL(uint32_t, u32RegCopy);
8007 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8008
8009 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8010 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8011 IEM_MC_FETCH_GREG_U32(u32RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8012 IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
8013 IEM_MC_FETCH_EFLAGS(EFlags);
8014 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8015 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
8016 else
8017 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);
8018
8019 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
8020 IEM_MC_COMMIT_EFLAGS(EFlags);
8021 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32RegCopy);
8022 IEM_MC_ADVANCE_RIP();
8023 IEM_MC_END();
8024 return VINF_SUCCESS;
8025
8026 case IEMMODE_64BIT:
8027 IEM_MC_BEGIN(3, 3);
8028 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8029 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
8030 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8031 IEM_MC_LOCAL(uint64_t, u64RegCopy);
8032 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8033
8034 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8035 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8036 IEM_MC_FETCH_GREG_U64(u64RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8037 IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
8038 IEM_MC_FETCH_EFLAGS(EFlags);
8039 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8040 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
8041 else
8042 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);
8043
8044 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
8045 IEM_MC_COMMIT_EFLAGS(EFlags);
8046 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64RegCopy);
8047 IEM_MC_ADVANCE_RIP();
8048 IEM_MC_END();
8049 return VINF_SUCCESS;
8050
8051 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8052 }
8053 }
8054}
8055
8056
8057/** Opcode 0x0f 0xc2 - cmpps Vps,Wps,Ib */
8058FNIEMOP_STUB(iemOp_cmpps_Vps_Wps_Ib);
8059/** Opcode 0x66 0x0f 0xc2 - cmppd Vpd,Wpd,Ib */
8060FNIEMOP_STUB(iemOp_cmppd_Vpd_Wpd_Ib);
8061/** Opcode 0xf3 0x0f 0xc2 - cmpss Vss,Wss,Ib */
8062FNIEMOP_STUB(iemOp_cmpss_Vss_Wss_Ib);
8063/** Opcode 0xf2 0x0f 0xc2 - cmpsd Vsd,Wsd,Ib */
8064FNIEMOP_STUB(iemOp_cmpsd_Vsd_Wsd_Ib);
8065
8066
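/*
 * Note: MOVNTI is an SSE2 non-temporal store hint. The hint has no
 * architecturally visible effect, so it is emulated below as a plain
 * general register store; the SSE2 CPUID check is all that distinguishes it.
 */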
8067/** Opcode 0x0f 0xc3. */
8068FNIEMOP_DEF(iemOp_movnti_My_Gy)
8069{
8070 IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");
8071
8072 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8073
8074 /* Only the register -> memory form makes sense, assuming #UD for the other form. */
8075 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
8076 {
8077 switch (pVCpu->iem.s.enmEffOpSize)
8078 {
8079 case IEMMODE_32BIT:
8080 IEM_MC_BEGIN(0, 2);
8081 IEM_MC_LOCAL(uint32_t, u32Value);
8082 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8083
8084 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8085 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8086 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
8087 return IEMOP_RAISE_INVALID_OPCODE();
8088
8089 IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8090 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
8091 IEM_MC_ADVANCE_RIP();
8092 IEM_MC_END();
8093 break;
8094
8095 case IEMMODE_64BIT:
8096 IEM_MC_BEGIN(0, 2);
8097 IEM_MC_LOCAL(uint64_t, u64Value);
8098 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8099
8100 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8101 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8102 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
8103 return IEMOP_RAISE_INVALID_OPCODE();
8104
8105 IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8106 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
8107 IEM_MC_ADVANCE_RIP();
8108 IEM_MC_END();
8109 break;
8110
8111 case IEMMODE_16BIT:
8112 /** @todo check this form. */
8113 return IEMOP_RAISE_INVALID_OPCODE();
8114 }
8115 }
8116 else
8117 return IEMOP_RAISE_INVALID_OPCODE();
8118 return VINF_SUCCESS;
8119}
8120/* Opcode 0x66 0x0f 0xc3 - invalid */
8121/* Opcode 0xf3 0x0f 0xc3 - invalid */
8122/* Opcode 0xf2 0x0f 0xc3 - invalid */
8123
8124/** Opcode 0x0f 0xc4 - pinsrw Pq, Ry/Mw,Ib */
8125FNIEMOP_STUB(iemOp_pinsrw_Pq_RyMw_Ib);
8126/** Opcode 0x66 0x0f 0xc4 - pinsrw Vdq, Ry/Mw,Ib */
8127FNIEMOP_STUB(iemOp_pinsrw_Vdq_RyMw_Ib);
8128/* Opcode 0xf3 0x0f 0xc4 - invalid */
8129/* Opcode 0xf2 0x0f 0xc4 - invalid */
8130
8131/** Opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib */
8132FNIEMOP_STUB(iemOp_pextrw_Gd_Nq_Ib);
8133/** Opcode 0x66 0x0f 0xc5 - pextrw Gd, Udq, Ib */
8134FNIEMOP_STUB(iemOp_pextrw_Gd_Udq_Ib);
8135/* Opcode 0xf3 0x0f 0xc5 - invalid */
8136/* Opcode 0xf2 0x0f 0xc5 - invalid */
8137
8138/** Opcode 0x0f 0xc6 - shufps Vps, Wps, Ib */
8139FNIEMOP_STUB(iemOp_shufps_Vps_Wps_Ib);
8140/** Opcode 0x66 0x0f 0xc6 - shufpd Vpd, Wpd, Ib */
8141FNIEMOP_STUB(iemOp_shufpd_Vpd_Wpd_Ib);
8142/* Opcode 0xf3 0x0f 0xc6 - invalid */
8143/* Opcode 0xf2 0x0f 0xc6 - invalid */
8144
8145
8146/** Opcode 0x0f 0xc7 !11/1. */
8147FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
8148{
8149 IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");
8150
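/*
 * Guest-visible semantics (per the Intel/AMD manuals):
 *     if (EDX:EAX == [mem64]) { ZF = 1; [mem64] = ECX:EBX; }
 *     else                    { ZF = 0; EDX:EAX = [mem64]; }
 */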
8151 IEM_MC_BEGIN(4, 3);
8152 IEM_MC_ARG(uint64_t *, pu64MemDst, 0);
8153 IEM_MC_ARG(PRTUINT64U, pu64EaxEdx, 1);
8154 IEM_MC_ARG(PRTUINT64U, pu64EbxEcx, 2);
8155 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
8156 IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
8157 IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
8158 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8159
8160 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8161 IEMOP_HLP_DONE_DECODING();
8162 IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8163
8164 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
8165 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
8166 IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);
8167
8168 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
8169 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
8170 IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);
8171
8172 IEM_MC_FETCH_EFLAGS(EFlags);
8173 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8174 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
8175 else
8176 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
8177
8178 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
8179 IEM_MC_COMMIT_EFLAGS(EFlags);
8180 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
8181 /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */
8182 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
8183 IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
8184 IEM_MC_ENDIF();
8185 IEM_MC_ADVANCE_RIP();
8186
8187 IEM_MC_END();
8188 return VINF_SUCCESS;
8189}
8190
8191
8192/** Opcode REX.W 0x0f 0xc7 !11/1. */
8193FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
8194{
8195 IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
8196 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
8197 {
8198#if 0
8199 RT_NOREF(bRm);
8200 IEMOP_BITCH_ABOUT_STUB();
8201 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
8202#else
8203 IEM_MC_BEGIN(4, 3);
8204 IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0);
8205 IEM_MC_ARG(PRTUINT128U, pu128RaxRdx, 1);
8206 IEM_MC_ARG(PRTUINT128U, pu128RbxRcx, 2);
8207 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
8208 IEM_MC_LOCAL(RTUINT128U, u128RaxRdx);
8209 IEM_MC_LOCAL(RTUINT128U, u128RbxRcx);
8210 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8211
8212 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8213 IEMOP_HLP_DONE_DECODING();
8214 IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16);
8215 IEM_MC_MEM_MAP(pu128MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8216
8217 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Lo, X86_GREG_xAX);
8218 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Hi, X86_GREG_xDX);
8219 IEM_MC_REF_LOCAL(pu128RaxRdx, u128RaxRdx);
8220
8221 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Lo, X86_GREG_xBX);
8222 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Hi, X86_GREG_xCX);
8223 IEM_MC_REF_LOCAL(pu128RbxRcx, u128RbxRcx);
8224
8225 IEM_MC_FETCH_EFLAGS(EFlags);
8226# ifdef RT_ARCH_AMD64
8227 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
8228 {
8229 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8230 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
8231 else
8232 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
8233 }
8234 else
8235# endif
8236 {
8237 /* Note! The fallback for 32-bit systems and systems without CX16 uses multiple
8238 accesses and is not fully atomic, which works fine in a uni-CPU guest
8239 configuration (ignoring DMA). If guest SMP is active we have no choice
8240 but to use a rendezvous callback here. Sigh. */
8241 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
8242 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
8243 else
8244 {
8245 IEM_MC_CALL_CIMPL_4(iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
8246 /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
8247 }
8248 }
8249
8250 IEM_MC_MEM_COMMIT_AND_UNMAP(pu128MemDst, IEM_ACCESS_DATA_RW);
8251 IEM_MC_COMMIT_EFLAGS(EFlags);
8252 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
8253 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u128RaxRdx.s.Lo);
8254 IEM_MC_STORE_GREG_U64(X86_GREG_xDX, u128RaxRdx.s.Hi);
8255 IEM_MC_ENDIF();
8256 IEM_MC_ADVANCE_RIP();
8257
8258 IEM_MC_END();
8259 return VINF_SUCCESS;
8260#endif
8261 }
8262 Log(("cmpxchg16b -> #UD\n"));
8263 return IEMOP_RAISE_INVALID_OPCODE();
8264}
8265
8266FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8bOr16b, uint8_t, bRm)
8267{
8268 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
8269 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
8270 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
8271}
8272
8273/** Opcode 0x0f 0xc7 11/6. */
8274FNIEMOP_UD_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm);
8275
8276/** Opcode 0x0f 0xc7 !11/6. */
8277FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);
8278
8279/** Opcode 0x66 0x0f 0xc7 !11/6. */
8280FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);
8281
8282/** Opcode 0xf3 0x0f 0xc7 !11/6. */
8283FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);
8284
8285/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
8286FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
8287
8288/** Opcode 0x0f 0xc7 11/7. */
8289FNIEMOP_UD_STUB_1(iemOp_Grp9_rdseed_Rv, uint8_t, bRm);
8290
8291
8292/**
8293 * Group 9 jump table for register variant.
8294 */
8295IEM_STATIC const PFNIEMOPRM g_apfnGroup9RegReg[] =
8296{ /* pfx: none, 066h, 0f3h, 0f2h */
8297 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
8298 /* /1 */ IEMOP_X4(iemOp_InvalidWithRM),
8299 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
8300 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
8301 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
8302 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
8303 /* /6 */ iemOp_Grp9_rdrand_Rv, iemOp_Grp9_rdrand_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8304 /* /7 */ iemOp_Grp9_rdseed_Rv, iemOp_Grp9_rdseed_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8305};
8306AssertCompile(RT_ELEMENTS(g_apfnGroup9RegReg) == 8*4);
8307
8308
8309/**
8310 * Group 9 jump table for memory variant.
8311 */
8312IEM_STATIC const PFNIEMOPRM g_apfnGroup9MemReg[] =
8313{ /* pfx: none, 066h, 0f3h, 0f2h */
8314 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
8315 /* /1 */ iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, /* see bs3-cpu-decoding-1 */
8316 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
8317 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
8318 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
8319 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
8320 /* /6 */ iemOp_Grp9_vmptrld_Mq, iemOp_Grp9_vmclear_Mq, iemOp_Grp9_vmxon_Mq, iemOp_InvalidWithRM,
8321 /* /7 */ iemOp_Grp9_vmptrst_Mq, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8322};
8323AssertCompile(RT_ELEMENTS(g_apfnGroup9MemReg) == 8*4);
8324
8325
8326/** Opcode 0x0f 0xc7. */
8327FNIEMOP_DEF(iemOp_Grp9)
8328{
8329 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8330 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8331 /* register, register */
8332 return FNIEMOP_CALL_1(g_apfnGroup9RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
8333 + pVCpu->iem.s.idxPrefix], bRm);
8334 /* memory, register */
8335 return FNIEMOP_CALL_1(g_apfnGroup9MemReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
8336 + pVCpu->iem.s.idxPrefix], bRm);
8337}
8338
8339
8340/**
8341 * Common 'bswap register' helper.
8342 */
8343FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
8344{
8345 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8346 switch (pVCpu->iem.s.enmEffOpSize)
8347 {
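/* Note: BSWAP with a 16-bit operand is documented as undefined by both
   Intel and AMD; the result here is whatever iemAImpl_bswap_u16 makes of
   the low word (the high dword is deliberately left alone, see below). */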
8348 case IEMMODE_16BIT:
8349 IEM_MC_BEGIN(1, 0);
8350 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8351 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
8352 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
8353 IEM_MC_ADVANCE_RIP();
8354 IEM_MC_END();
8355 return VINF_SUCCESS;
8356
8357 case IEMMODE_32BIT:
8358 IEM_MC_BEGIN(1, 0);
8359 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8360 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
8361 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
8362 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
8363 IEM_MC_ADVANCE_RIP();
8364 IEM_MC_END();
8365 return VINF_SUCCESS;
8366
8367 case IEMMODE_64BIT:
8368 IEM_MC_BEGIN(1, 0);
8369 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8370 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
8371 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
8372 IEM_MC_ADVANCE_RIP();
8373 IEM_MC_END();
8374 return VINF_SUCCESS;
8375
8376 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8377 }
8378}
8379
8380
8381/** Opcode 0x0f 0xc8. */
8382FNIEMOP_DEF(iemOp_bswap_rAX_r8)
8383{
8384 IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
8385 /* Note! The Intel manuals state that R8-R15 can be accessed by using a REX.X
8386 prefix. It appears REX.B is actually the correct prefix. For a parallel
8387 case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
8388 IEMOP_HLP_MIN_486();
8389 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
8390}
8391
8392
8393/** Opcode 0x0f 0xc9. */
8394FNIEMOP_DEF(iemOp_bswap_rCX_r9)
8395{
8396 IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
8397 IEMOP_HLP_MIN_486();
8398 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
8399}
8400
8401
8402/** Opcode 0x0f 0xca. */
8403FNIEMOP_DEF(iemOp_bswap_rDX_r10)
8404{
8405 IEMOP_MNEMONIC(bswap_rDX_r10, "bswap rDX/r10");
8406 IEMOP_HLP_MIN_486();
8407 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
8408}
8409
8410
8411/** Opcode 0x0f 0xcb. */
8412FNIEMOP_DEF(iemOp_bswap_rBX_r11)
8413{
8414 IEMOP_MNEMONIC(bswap_rBX_r11, "bswap rBX/r11");
8415 IEMOP_HLP_MIN_486();
8416 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
8417}
8418
8419
8420/** Opcode 0x0f 0xcc. */
8421FNIEMOP_DEF(iemOp_bswap_rSP_r12)
8422{
8423 IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
8424 IEMOP_HLP_MIN_486();
8425 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
8426}
8427
8428
8429/** Opcode 0x0f 0xcd. */
8430FNIEMOP_DEF(iemOp_bswap_rBP_r13)
8431{
8432 IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
8433 IEMOP_HLP_MIN_486();
8434 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
8435}
8436
8437
8438/** Opcode 0x0f 0xce. */
8439FNIEMOP_DEF(iemOp_bswap_rSI_r14)
8440{
8441 IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
8442 IEMOP_HLP_MIN_486();
8443 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
8444}
8445
8446
8447/** Opcode 0x0f 0xcf. */
8448FNIEMOP_DEF(iemOp_bswap_rDI_r15)
8449{
8450 IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
8451 IEMOP_HLP_MIN_486();
8452 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
8453}
8454
8455
8456/* Opcode 0x0f 0xd0 - invalid */
8457/** Opcode 0x66 0x0f 0xd0 - addsubpd Vpd, Wpd */
8458FNIEMOP_STUB(iemOp_addsubpd_Vpd_Wpd);
8459/* Opcode 0xf3 0x0f 0xd0 - invalid */
8460/** Opcode 0xf2 0x0f 0xd0 - addsubps Vps, Wps */
8461FNIEMOP_STUB(iemOp_addsubps_Vps_Wps);
8462
8463/** Opcode 0x0f 0xd1 - psrlw Pq, Qq */
8464FNIEMOP_STUB(iemOp_psrlw_Pq_Qq);
8465/** Opcode 0x66 0x0f 0xd1 - psrlw Vx, W */
8466FNIEMOP_STUB(iemOp_psrlw_Vx_W);
8467/* Opcode 0xf3 0x0f 0xd1 - invalid */
8468/* Opcode 0xf2 0x0f 0xd1 - invalid */
8469
8470/** Opcode 0x0f 0xd2 - psrld Pq, Qq */
8471FNIEMOP_STUB(iemOp_psrld_Pq_Qq);
8472/** Opcode 0x66 0x0f 0xd2 - psrld Vx, Wx */
8473FNIEMOP_STUB(iemOp_psrld_Vx_Wx);
8474/* Opcode 0xf3 0x0f 0xd2 - invalid */
8475/* Opcode 0xf2 0x0f 0xd2 - invalid */
8476
8477/** Opcode 0x0f 0xd3 - psrlq Pq, Qq */
8478FNIEMOP_STUB(iemOp_psrlq_Pq_Qq);
8479/** Opcode 0x66 0x0f 0xd3 - psrlq Vx, Wx */
8480FNIEMOP_STUB(iemOp_psrlq_Vx_Wx);
8481/* Opcode 0xf3 0x0f 0xd3 - invalid */
8482/* Opcode 0xf2 0x0f 0xd3 - invalid */
8483
8484/** Opcode 0x0f 0xd4 - paddq Pq, Qq */
8485FNIEMOP_STUB(iemOp_paddq_Pq_Qq);
8486/** Opcode 0x66 0x0f 0xd4 - paddq Vx, W */
8487FNIEMOP_STUB(iemOp_paddq_Vx_W);
8488/* Opcode 0xf3 0x0f 0xd4 - invalid */
8489/* Opcode 0xf2 0x0f 0xd4 - invalid */
8490
8491/** Opcode 0x0f 0xd5 - pmullw Pq, Qq */
8492FNIEMOP_STUB(iemOp_pmullw_Pq_Qq);
8493/** Opcode 0x66 0x0f 0xd5 - pmullw Vx, Wx */
8494FNIEMOP_STUB(iemOp_pmullw_Vx_Wx);
8495/* Opcode 0xf3 0x0f 0xd5 - invalid */
8496/* Opcode 0xf2 0x0f 0xd5 - invalid */
8497
8498/* Opcode 0x0f 0xd6 - invalid */
8499
8500/**
8501 * @opcode 0xd6
8502 * @oppfx 0x66
8503 * @opcpuid sse2
8504 * @opgroup og_sse2_pcksclr_datamove
8505 * @opxcpttype none
8506 * @optest op1=-1 op2=2 -> op1=2
8507 * @optest op1=0 op2=-42 -> op1=-42
8508 */
8509FNIEMOP_DEF(iemOp_movq_Wq_Vq)
8510{
8511 IEMOP_MNEMONIC2(MR, MOVQ, movq, WqZxReg_WO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
8512 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8513 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8514 {
8515 /*
8516 * Register, register.
8517 */
8518 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8519 IEM_MC_BEGIN(0, 2);
8520 IEM_MC_LOCAL(uint64_t, uSrc);
8521
8522 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8523 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
8524
8525 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8526 IEM_MC_STORE_XREG_U64_ZX_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
8527
8528 IEM_MC_ADVANCE_RIP();
8529 IEM_MC_END();
8530 }
8531 else
8532 {
8533 /*
8534 * Memory, register.
8535 */
8536 IEM_MC_BEGIN(0, 2);
8537 IEM_MC_LOCAL(uint64_t, uSrc);
8538 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8539
8540 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8541 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8542 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8543 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
8544
8545 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8546 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
8547
8548 IEM_MC_ADVANCE_RIP();
8549 IEM_MC_END();
8550 }
8551 return VINF_SUCCESS;
8552}
8553
8554
8555/**
8556 * @opcode 0xd6
8557 * @opcodesub 11 mr/reg
8558 * @oppfx f3
8559 * @opcpuid sse2
8560 * @opgroup og_sse2_simdint_datamove
8561 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
8562 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
8563 */
8564FNIEMOP_DEF(iemOp_movq2dq_Vdq_Nq)
8565{
8566 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8567 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8568 {
8569 /*
8570 * Register, register.
8571 */
8572 IEMOP_MNEMONIC2(RM_REG, MOVQ2DQ, movq2dq, VqZx_WO, Nq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
8573 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8574 IEM_MC_BEGIN(0, 1);
8575 IEM_MC_LOCAL(uint64_t, uSrc);
8576
8577 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8578 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
8579
8580 IEM_MC_FETCH_MREG_U64(uSrc, bRm & X86_MODRM_RM_MASK);
8581 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
8582 IEM_MC_FPU_TO_MMX_MODE();
8583
8584 IEM_MC_ADVANCE_RIP();
8585 IEM_MC_END();
8586 return VINF_SUCCESS;
8587 }
8588
8589 /**
8590 * @opdone
8591 * @opmnemonic udf30fd6mem
8592 * @opcode 0xd6
8593 * @opcodesub !11 mr/reg
8594 * @oppfx f3
8595 * @opunused intel-modrm
8596 * @opcpuid sse
8597 * @optest ->
8598 */
8599 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
8600}
8601
8602
8603/**
8604 * @opcode 0xd6
8605 * @opcodesub 11 mr/reg
8606 * @oppfx f2
8607 * @opcpuid sse2
8608 * @opgroup og_sse2_simdint_datamove
8609 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
8610 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
8611 * @optest op1=0 op2=0x1123456789abcdef -> op1=0x1123456789abcdef ftw=0xff
8612 * @optest op1=0 op2=0xfedcba9876543210 -> op1=0xfedcba9876543210 ftw=0xff
8613 * @optest op1=-42 op2=0xfedcba9876543210
8614 * -> op1=0xfedcba9876543210 ftw=0xff
8615 */
8616FNIEMOP_DEF(iemOp_movdq2q_Pq_Uq)
8617{
8618 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8619 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8620 {
8621 /*
8622 * Register, register.
8623 */
8624 IEMOP_MNEMONIC2(RM_REG, MOVDQ2Q, movdq2q, Pq_WO, Uq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
8625 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8626 IEM_MC_BEGIN(0, 1);
8627 IEM_MC_LOCAL(uint64_t, uSrc);
8628
8629 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8630 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
8631
8632 IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8633 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, uSrc);
8634 IEM_MC_FPU_TO_MMX_MODE();
8635
8636 IEM_MC_ADVANCE_RIP();
8637 IEM_MC_END();
8638 return VINF_SUCCESS;
8639 }
8640
8641 /**
8642 * @opdone
8643 * @opmnemonic udf20fd6mem
8644 * @opcode 0xd6
8645 * @opcodesub !11 mr/reg
8646 * @oppfx f2
8647 * @opunused intel-modrm
8648 * @opcpuid sse
8649 * @optest ->
8650 */
8651 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
8652}
8653
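/*
 * PMOVMSKB gathers the most significant bit of each packed byte of the
 * source into the low bits of the destination general register (8 bits
 * for the MMX form, 16 bits for the SSE form), zeroing the rest.
 */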
8654/** Opcode 0x0f 0xd7 - pmovmskb Gd, Nq */
8655FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq)
8656{
8657 /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
8658 /** @todo testcase: Check that the instruction implicitly clears the high
8659 * bits in 64-bit mode. REX.W only becomes necessary when VLMAX > 256
8660 * and opcode modifications are made to work with the whole width (not
8661 * just 128). */
8662 IEMOP_MNEMONIC(pmovmskb_Gd_Nq, "pmovmskb Gd,Nq");
8663 /* Docs say register only. */
8664 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8665 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
8666 {
8667 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
8668 IEM_MC_BEGIN(2, 0);
8669 IEM_MC_ARG(uint64_t *, pDst, 0);
8670 IEM_MC_ARG(uint64_t const *, pSrc, 1);
8671 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
8672 IEM_MC_PREPARE_FPU_USAGE();
8673 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
8674 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
8675 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
8676 IEM_MC_ADVANCE_RIP();
8677 IEM_MC_END();
8678 return VINF_SUCCESS;
8679 }
8680 return IEMOP_RAISE_INVALID_OPCODE();
8681}
8682
8683 /** Opcode 0x66 0x0f 0xd7 - pmovmskb Gd, Ux */
8684FNIEMOP_DEF(iemOp_pmovmskb_Gd_Ux)
8685{
8686 /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
8687 /** @todo testcase: Check that the instruction implicitly clears the high
8688 * bits in 64-bit mode. REX.W only becomes necessary when VLMAX > 256
8689 * and opcode modifications are made to work with the whole width (not
8690 * just 128). */
8691 IEMOP_MNEMONIC(pmovmskb_Gd_Ux, "pmovmskb Gd,Ux");
8692 /* Docs say register only. */
8693 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8694 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
8695 {
8696 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
8697 IEM_MC_BEGIN(2, 0);
8698 IEM_MC_ARG(uint64_t *, pDst, 0);
8699 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
8700 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8701 IEM_MC_PREPARE_SSE_USAGE();
8702 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8703 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8704 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
8705 IEM_MC_ADVANCE_RIP();
8706 IEM_MC_END();
8707 return VINF_SUCCESS;
8708 }
8709 return IEMOP_RAISE_INVALID_OPCODE();
8710}
8711
8712/* Opcode 0xf3 0x0f 0xd7 - invalid */
8713/* Opcode 0xf2 0x0f 0xd7 - invalid */
8714
8715
8716/** Opcode 0x0f 0xd8 - psubusb Pq, Qq */
8717FNIEMOP_STUB(iemOp_psubusb_Pq_Qq);
8718/** Opcode 0x66 0x0f 0xd8 - psubusb Vx, W */
8719FNIEMOP_STUB(iemOp_psubusb_Vx_W);
8720/* Opcode 0xf3 0x0f 0xd8 - invalid */
8721/* Opcode 0xf2 0x0f 0xd8 - invalid */
8722
8723/** Opcode 0x0f 0xd9 - psubusw Pq, Qq */
8724FNIEMOP_STUB(iemOp_psubusw_Pq_Qq);
8725/** Opcode 0x66 0x0f 0xd9 - psubusw Vx, Wx */
8726FNIEMOP_STUB(iemOp_psubusw_Vx_Wx);
8727/* Opcode 0xf3 0x0f 0xd9 - invalid */
8728/* Opcode 0xf2 0x0f 0xd9 - invalid */
8729
8730/** Opcode 0x0f 0xda - pminub Pq, Qq */
8731FNIEMOP_STUB(iemOp_pminub_Pq_Qq);
8732/** Opcode 0x66 0x0f 0xda - pminub Vx, Wx */
8733FNIEMOP_STUB(iemOp_pminub_Vx_Wx);
8734/* Opcode 0xf3 0x0f 0xda - invalid */
8735/* Opcode 0xf2 0x0f 0xda - invalid */
8736
8737/** Opcode 0x0f 0xdb - pand Pq, Qq */
8738FNIEMOP_STUB(iemOp_pand_Pq_Qq);
8739/** Opcode 0x66 0x0f 0xdb - pand Vx, W */
8740FNIEMOP_STUB(iemOp_pand_Vx_W);
8741/* Opcode 0xf3 0x0f 0xdb - invalid */
8742/* Opcode 0xf2 0x0f 0xdb - invalid */
8743
8744/** Opcode 0x0f 0xdc - paddusb Pq, Qq */
8745FNIEMOP_STUB(iemOp_paddusb_Pq_Qq);
8746/** Opcode 0x66 0x0f 0xdc - paddusb Vx, Wx */
8747FNIEMOP_STUB(iemOp_paddusb_Vx_Wx);
8748/* Opcode 0xf3 0x0f 0xdc - invalid */
8749/* Opcode 0xf2 0x0f 0xdc - invalid */
8750
8751/** Opcode 0x0f 0xdd - paddusw Pq, Qq */
8752FNIEMOP_STUB(iemOp_paddusw_Pq_Qq);
8753/** Opcode 0x66 0x0f 0xdd - paddusw Vx, Wx */
8754FNIEMOP_STUB(iemOp_paddusw_Vx_Wx);
8755/* Opcode 0xf3 0x0f 0xdd - invalid */
8756/* Opcode 0xf2 0x0f 0xdd - invalid */
8757
8758/** Opcode 0x0f 0xde - pmaxub Pq, Qq */
8759FNIEMOP_STUB(iemOp_pmaxub_Pq_Qq);
8760/** Opcode 0x66 0x0f 0xde - pmaxub Vx, W */
8761FNIEMOP_STUB(iemOp_pmaxub_Vx_W);
8762/* Opcode 0xf3 0x0f 0xde - invalid */
8763/* Opcode 0xf2 0x0f 0xde - invalid */
8764
8765/** Opcode 0x0f 0xdf - pandn Pq, Qq */
8766FNIEMOP_STUB(iemOp_pandn_Pq_Qq);
8767/** Opcode 0x66 0x0f 0xdf - pandn Vx, Wx */
8768FNIEMOP_STUB(iemOp_pandn_Vx_Wx);
8769/* Opcode 0xf3 0x0f 0xdf - invalid */
8770/* Opcode 0xf2 0x0f 0xdf - invalid */
8771
8772/** Opcode 0x0f 0xe0 - pavgb Pq, Qq */
8773FNIEMOP_STUB(iemOp_pavgb_Pq_Qq);
8774/** Opcode 0x66 0x0f 0xe0 - pavgb Vx, Wx */
8775FNIEMOP_STUB(iemOp_pavgb_Vx_Wx);
8776/* Opcode 0xf3 0x0f 0xe0 - invalid */
8777/* Opcode 0xf2 0x0f 0xe0 - invalid */
8778
8779/** Opcode 0x0f 0xe1 - psraw Pq, Qq */
8780FNIEMOP_STUB(iemOp_psraw_Pq_Qq);
8781/** Opcode 0x66 0x0f 0xe1 - psraw Vx, Wx */
8782FNIEMOP_STUB(iemOp_psraw_Vx_Wx);
8783/* Opcode 0xf3 0x0f 0xe1 - invalid */
8784/* Opcode 0xf2 0x0f 0xe1 - invalid */
8785
8786/** Opcode 0x0f 0xe2 - psrad Pq, Qq */
8787FNIEMOP_STUB(iemOp_psrad_Pq_Qq);
8788/** Opcode 0x66 0x0f 0xe2 - psrad Vx, Wx */
8789FNIEMOP_STUB(iemOp_psrad_Vx_Wx);
8790/* Opcode 0xf3 0x0f 0xe2 - invalid */
8791/* Opcode 0xf2 0x0f 0xe2 - invalid */
8792
8793/** Opcode 0x0f 0xe3 - pavgw Pq, Qq */
8794FNIEMOP_STUB(iemOp_pavgw_Pq_Qq);
8795/** Opcode 0x66 0x0f 0xe3 - pavgw Vx, Wx */
8796FNIEMOP_STUB(iemOp_pavgw_Vx_Wx);
8797/* Opcode 0xf3 0x0f 0xe3 - invalid */
8798/* Opcode 0xf2 0x0f 0xe3 - invalid */
8799
8800/** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq */
8801FNIEMOP_STUB(iemOp_pmulhuw_Pq_Qq);
8802/** Opcode 0x66 0x0f 0xe4 - pmulhuw Vx, Wx */
8803FNIEMOP_STUB(iemOp_pmulhuw_Vx_Wx);
8804/* Opcode 0xf3 0x0f 0xe4 - invalid */
8805/* Opcode 0xf2 0x0f 0xe4 - invalid */
8806
8807/** Opcode 0x0f 0xe5 - pmulhw Pq, Qq */
8808FNIEMOP_STUB(iemOp_pmulhw_Pq_Qq);
8809/** Opcode 0x66 0x0f 0xe5 - pmulhw Vx, Wx */
8810FNIEMOP_STUB(iemOp_pmulhw_Vx_Wx);
8811/* Opcode 0xf3 0x0f 0xe5 - invalid */
8812/* Opcode 0xf2 0x0f 0xe5 - invalid */
8813
8814/* Opcode 0x0f 0xe6 - invalid */
8815/** Opcode 0x66 0x0f 0xe6 - cvttpd2dq Vx, Wpd */
8816FNIEMOP_STUB(iemOp_cvttpd2dq_Vx_Wpd);
8817/** Opcode 0xf3 0x0f 0xe6 - cvtdq2pd Vx, Wpd */
8818FNIEMOP_STUB(iemOp_cvtdq2pd_Vx_Wpd);
8819/** Opcode 0xf2 0x0f 0xe6 - cvtpd2dq Vx, Wpd */
8820FNIEMOP_STUB(iemOp_cvtpd2dq_Vx_Wpd);
8821
8822
8823/** Opcode 0x0f 0xe7 - movntq Mq, Pq */
8824FNIEMOP_DEF(iemOp_movntq_Mq_Pq)
8825{
8826 IEMOP_MNEMONIC(movntq_Mq_Pq, "movntq Mq,Pq");
8827 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8828 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
8829 {
8830 /* Register, memory. */
8831 IEM_MC_BEGIN(0, 2);
8832 IEM_MC_LOCAL(uint64_t, uSrc);
8833 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8834
8835 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8836 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8837 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
8838 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
8839
8840 IEM_MC_FETCH_MREG_U64(uSrc, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
8841 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
8842
8843 IEM_MC_ADVANCE_RIP();
8844 IEM_MC_END();
8845 return VINF_SUCCESS;
8846 }
8847 /* The register, register encoding is invalid. */
8848 return IEMOP_RAISE_INVALID_OPCODE();
8849}
8850
8851/** Opcode 0x66 0x0f 0xe7 - movntdq Mx, Vx */
8852FNIEMOP_DEF(iemOp_movntdq_Mx_Vx)
8853{
8854 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8855 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
8856 {
8857 /* Register, memory. */
8858 IEMOP_MNEMONIC(movntdq_Mx_Vx, "movntdq Mx,Vx");
8859 IEM_MC_BEGIN(0, 2);
8860 IEM_MC_LOCAL(RTUINT128U, uSrc);
8861 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8862
8863 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8864 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8865 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8866 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
8867
8868 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8869 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
8870
8871 IEM_MC_ADVANCE_RIP();
8872 IEM_MC_END();
8873 return VINF_SUCCESS;
8874 }
8875
8876 /* The register, register encoding is invalid. */
8877 return IEMOP_RAISE_INVALID_OPCODE();
8878}
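
/*
 * Editor's note: both movntq and movntdq above reject the mod=3 (register,
 * register) encoding because the non-temporal store forms are memory-only.
 * Below is a hypothetical helper expressing the mod-field test these decoders
 * repeat inline; a sketch under that assumption, not part of the real sources.
 */
#if 0 /* illustrative sketch, not built */
DECLINLINE(bool) iemSketchModRmIsRegReg(uint8_t bRm)
{
    /* mod == 3 selects the register,register form; mod 0..2 select memory forms. */
    return (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT);
}
#endif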
8879
8880/* Opcode 0xf3 0x0f 0xe7 - invalid */
8881/* Opcode 0xf2 0x0f 0xe7 - invalid */
8882
8883
8884/** Opcode 0x0f 0xe8 - psubsb Pq, Qq */
8885FNIEMOP_STUB(iemOp_psubsb_Pq_Qq);
8886/** Opcode 0x66 0x0f 0xe8 - psubsb Vx, Wx */
8887FNIEMOP_STUB(iemOp_psubsb_Vx_Wx);
8888/* Opcode 0xf3 0x0f 0xe8 - invalid */
8889/* Opcode 0xf2 0x0f 0xe8 - invalid */
8890
8891/** Opcode 0x0f 0xe9 - psubsw Pq, Qq */
8892FNIEMOP_STUB(iemOp_psubsw_Pq_Qq);
8893/** Opcode 0x66 0x0f 0xe9 - psubsw Vx, Wx */
8894FNIEMOP_STUB(iemOp_psubsw_Vx_Wx);
8895/* Opcode 0xf3 0x0f 0xe9 - invalid */
8896/* Opcode 0xf2 0x0f 0xe9 - invalid */
8897
8898/** Opcode 0x0f 0xea - pminsw Pq, Qq */
8899FNIEMOP_STUB(iemOp_pminsw_Pq_Qq);
8900/** Opcode 0x66 0x0f 0xea - pminsw Vx, Wx */
8901FNIEMOP_STUB(iemOp_pminsw_Vx_Wx);
8902/* Opcode 0xf3 0x0f 0xea - invalid */
8903/* Opcode 0xf2 0x0f 0xea - invalid */
8904
8905/** Opcode 0x0f 0xeb - por Pq, Qq */
8906FNIEMOP_STUB(iemOp_por_Pq_Qq);
8907/** Opcode 0x66 0x0f 0xeb - por Vx, Wx */
8908FNIEMOP_STUB(iemOp_por_Vx_Wx);
8909/* Opcode 0xf3 0x0f 0xeb - invalid */
8910/* Opcode 0xf2 0x0f 0xeb - invalid */
8911
8912/** Opcode 0x0f 0xec - paddsb Pq, Qq */
8913FNIEMOP_STUB(iemOp_paddsb_Pq_Qq);
8914/** Opcode 0x66 0x0f 0xec - paddsb Vx, Wx */
8915FNIEMOP_STUB(iemOp_paddsb_Vx_Wx);
8916/* Opcode 0xf3 0x0f 0xec - invalid */
8917/* Opcode 0xf2 0x0f 0xec - invalid */
8918
8919/** Opcode 0x0f 0xed - paddsw Pq, Qq */
8920FNIEMOP_STUB(iemOp_paddsw_Pq_Qq);
8921/** Opcode 0x66 0x0f 0xed - paddsw Vx, Wx */
8922FNIEMOP_STUB(iemOp_paddsw_Vx_Wx);
8923/* Opcode 0xf3 0x0f 0xed - invalid */
8924/* Opcode 0xf2 0x0f 0xed - invalid */
8925
8926/** Opcode 0x0f 0xee - pmaxsw Pq, Qq */
8927FNIEMOP_STUB(iemOp_pmaxsw_Pq_Qq);
8928/** Opcode 0x66 0x0f 0xee - pmaxsw Vx, Wx */
8929FNIEMOP_STUB(iemOp_pmaxsw_Vx_Wx);
8930/* Opcode 0xf3 0x0f 0xee - invalid */
8931/* Opcode 0xf2 0x0f 0xee - invalid */
8932
8933
8934/** Opcode 0x0f 0xef - pxor Pq, Qq */
8935FNIEMOP_DEF(iemOp_pxor_Pq_Qq)
8936{
8937 IEMOP_MNEMONIC(pxor, "pxor");
8938 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pxor);
8939}
8940
8941/** Opcode 0x66 0x0f 0xef - pxor Vx, Wx */
8942FNIEMOP_DEF(iemOp_pxor_Vx_Wx)
8943{
8944 IEMOP_MNEMONIC(pxor_Vx_Wx, "pxor");
8945 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pxor);
8946}
8947
8948/* Opcode 0xf3 0x0f 0xef - invalid */
8949/* Opcode 0xf2 0x0f 0xef - invalid */
8950
8951/* Opcode 0x0f 0xf0 - invalid */
8952/* Opcode 0x66 0x0f 0xf0 - invalid */
8953/** Opcode 0xf2 0x0f 0xf0 - lddqu Vx, Mx */
8954FNIEMOP_STUB(iemOp_lddqu_Vx_Mx);
8955
8956/** Opcode 0x0f 0xf1 - psllw Pq, Qq */
8957FNIEMOP_STUB(iemOp_psllw_Pq_Qq);
8958/** Opcode 0x66 0x0f 0xf1 - psllw Vx, Wx */
8959FNIEMOP_STUB(iemOp_psllw_Vx_Wx);
8960/* Opcode 0xf2 0x0f 0xf1 - invalid */
8961
8962/** Opcode 0x0f 0xf2 - pslld Pq, Qq */
8963FNIEMOP_STUB(iemOp_pslld_Pq_Qq);
8964/** Opcode 0x66 0x0f 0xf2 - pslld Vx, Wx */
8965FNIEMOP_STUB(iemOp_pslld_Vx_Wx);
8966/* Opcode 0xf2 0x0f 0xf2 - invalid */
8967
8968/** Opcode 0x0f 0xf3 - psllq Pq, Qq */
8969FNIEMOP_STUB(iemOp_psllq_Pq_Qq);
8970/** Opcode 0x66 0x0f 0xf3 - psllq Vx, Wx */
8971FNIEMOP_STUB(iemOp_psllq_Vx_Wx);
8972/* Opcode 0xf2 0x0f 0xf3 - invalid */
8973
8974/** Opcode 0x0f 0xf4 - pmuludq Pq, Qq */
8975FNIEMOP_STUB(iemOp_pmuludq_Pq_Qq);
8976/** Opcode 0x66 0x0f 0xf4 - pmuludq Vx, Wx */
8977FNIEMOP_STUB(iemOp_pmuludq_Vx_Wx);
8978/* Opcode 0xf2 0x0f 0xf4 - invalid */
8979
8980/** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq */
8981FNIEMOP_STUB(iemOp_pmaddwd_Pq_Qq);
8982/** Opcode 0x66 0x0f 0xf5 - pmaddwd Vx, Wx */
8983FNIEMOP_STUB(iemOp_pmaddwd_Vx_Wx);
8984/* Opcode 0xf2 0x0f 0xf5 - invalid */
8985
8986/** Opcode 0x0f 0xf6 - psadbw Pq, Qq */
8987FNIEMOP_STUB(iemOp_psadbw_Pq_Qq);
8988/** Opcode 0x66 0x0f 0xf6 - psadbw Vx, Wx */
8989FNIEMOP_STUB(iemOp_psadbw_Vx_Wx);
8990/* Opcode 0xf2 0x0f 0xf6 - invalid */
8991
8992/** Opcode 0x0f 0xf7 - maskmovq Pq, Nq */
8993FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq);
8994/** Opcode 0x66 0x0f 0xf7 - maskmovdqu Vdq, Udq */
8995FNIEMOP_STUB(iemOp_maskmovdqu_Vdq_Udq);
8996/* Opcode 0xf2 0x0f 0xf7 - invalid */
8997
8998/** Opcode 0x0f 0xf8 - psubb Pq, Qq */
8999FNIEMOP_STUB(iemOp_psubb_Pq_Qq);
9000/** Opcode 0x66 0x0f 0xf8 - psubb Vx, Wx */
9001FNIEMOP_STUB(iemOp_psubb_Vx_Wx);
9002/* Opcode 0xf2 0x0f 0xf8 - invalid */
9003
9004/** Opcode 0x0f 0xf9 - psubw Pq, Qq */
9005FNIEMOP_STUB(iemOp_psubw_Pq_Qq);
9006/** Opcode 0x66 0x0f 0xf9 - psubw Vx, Wx */
9007FNIEMOP_STUB(iemOp_psubw_Vx_Wx);
9008/* Opcode 0xf2 0x0f 0xf9 - invalid */
9009
9010/** Opcode 0x0f 0xfa - psubd Pq, Qq */
9011FNIEMOP_STUB(iemOp_psubd_Pq_Qq);
9012/** Opcode 0x66 0x0f 0xfa - psubd Vx, Wx */
9013FNIEMOP_STUB(iemOp_psubd_Vx_Wx);
9014/* Opcode 0xf2 0x0f 0xfa - invalid */
9015
9016/** Opcode 0x0f 0xfb - psubq Pq, Qq */
9017FNIEMOP_STUB(iemOp_psubq_Pq_Qq);
9018/** Opcode 0x66 0x0f 0xfb - psubq Vx, Wx */
9019FNIEMOP_STUB(iemOp_psubq_Vx_Wx);
9020/* Opcode 0xf2 0x0f 0xfb - invalid */
9021
9022/** Opcode 0x0f 0xfc - paddb Pq, Qq */
9023FNIEMOP_STUB(iemOp_paddb_Pq_Qq);
9024/** Opcode 0x66 0x0f 0xfc - paddb Vx, Wx */
9025FNIEMOP_STUB(iemOp_paddb_Vx_Wx);
9026/* Opcode 0xf2 0x0f 0xfc - invalid */
9027
9028/** Opcode 0x0f 0xfd - paddw Pq, Qq */
9029FNIEMOP_STUB(iemOp_paddw_Pq_Qq);
9030/** Opcode 0x66 0x0f 0xfd - paddw Vx, Wx */
9031FNIEMOP_STUB(iemOp_paddw_Vx_Wx);
9032/* Opcode 0xf2 0x0f 0xfd - invalid */
9033
9034/** Opcode 0x0f 0xfe - paddd Pq, Qq */
9035FNIEMOP_STUB(iemOp_paddd_Pq_Qq);
9036/** Opcode 0x66 0x0f 0xfe - paddd Vx, Wx */
9037FNIEMOP_STUB(iemOp_paddd_Vx_Wx);
9038/* Opcode 0xf2 0x0f 0xfe - invalid */
9039
9040
9041/** Opcode 0x0f 0xff - UD0 (valid with any prefix) */
9042FNIEMOP_DEF(iemOp_ud0)
9043{
9044 IEMOP_MNEMONIC(ud0, "ud0");
9045 if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
9046 {
9047 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
9048#ifndef TST_IEM_CHECK_MC
9049 RTGCPTR GCPtrEff;
9050 VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
9051 if (rcStrict != VINF_SUCCESS)
9052 return rcStrict;
9053#endif
9054 IEMOP_HLP_DONE_DECODING();
9055 }
9056 return IEMOP_RAISE_INVALID_OPCODE();
9057}
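
/*
 * Editor's note: the vendor check in iemOp_ud0 models the observation that
 * Intel CPUs decode a ModR/M byte (plus any SIB/displacement it implies) for
 * UD0 before raising #UD, while other CPUs fault on the bare opcode bytes.
 * Assumed encodings, shown only to illustrate the resulting lengths:
 */
#if 0 /* illustrative sketch, not built */
/* 0F FF C0             : ud0 eax, eax             - 3 bytes consumed on Intel. */
/* 0F FF 80 78 56 34 12 : ud0 eax, [eax+12345678h] - 7 bytes consumed on Intel. */
/* 0F FF                : 2 bytes on CPUs that do not decode the ModR/M byte.   */
#endif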
9058
9059
9060
9061/**
9062 * Two byte opcode map, first byte 0x0f.
9063 *
9064 * @remarks The g_apfnVexMap1 table is currently a subset of this one, so please
9065 * check if it needs updating as well when making changes.
9066 */
9067IEM_STATIC const PFNIEMOP g_apfnTwoByteMap[] =
9068{
9069    /*          no prefix,           066h prefix,           f3h prefix,            f2h prefix */
9070 /* 0x00 */ IEMOP_X4(iemOp_Grp6),
9071 /* 0x01 */ IEMOP_X4(iemOp_Grp7),
9072 /* 0x02 */ IEMOP_X4(iemOp_lar_Gv_Ew),
9073 /* 0x03 */ IEMOP_X4(iemOp_lsl_Gv_Ew),
9074 /* 0x04 */ IEMOP_X4(iemOp_Invalid),
9075 /* 0x05 */ IEMOP_X4(iemOp_syscall),
9076 /* 0x06 */ IEMOP_X4(iemOp_clts),
9077 /* 0x07 */ IEMOP_X4(iemOp_sysret),
9078 /* 0x08 */ IEMOP_X4(iemOp_invd),
9079 /* 0x09 */ IEMOP_X4(iemOp_wbinvd),
9080 /* 0x0a */ IEMOP_X4(iemOp_Invalid),
9081 /* 0x0b */ IEMOP_X4(iemOp_ud2),
9082 /* 0x0c */ IEMOP_X4(iemOp_Invalid),
9083 /* 0x0d */ IEMOP_X4(iemOp_nop_Ev_GrpP),
9084 /* 0x0e */ IEMOP_X4(iemOp_femms),
9085 /* 0x0f */ IEMOP_X4(iemOp_3Dnow),
9086
9087 /* 0x10 */ iemOp_movups_Vps_Wps, iemOp_movupd_Vpd_Wpd, iemOp_movss_Vss_Wss, iemOp_movsd_Vsd_Wsd,
9088 /* 0x11 */ iemOp_movups_Wps_Vps, iemOp_movupd_Wpd_Vpd, iemOp_movss_Wss_Vss, iemOp_movsd_Wsd_Vsd,
9089 /* 0x12 */ iemOp_movlps_Vq_Mq__movhlps, iemOp_movlpd_Vq_Mq, iemOp_movsldup_Vdq_Wdq, iemOp_movddup_Vdq_Wdq,
9090 /* 0x13 */ iemOp_movlps_Mq_Vq, iemOp_movlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9091 /* 0x14 */ iemOp_unpcklps_Vx_Wx, iemOp_unpcklpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9092 /* 0x15 */ iemOp_unpckhps_Vx_Wx, iemOp_unpckhpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9093 /* 0x16 */ iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq, iemOp_movhpd_Vdq_Mq, iemOp_movshdup_Vdq_Wdq, iemOp_InvalidNeedRM,
9094 /* 0x17 */ iemOp_movhps_Mq_Vq, iemOp_movhpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9095 /* 0x18 */ IEMOP_X4(iemOp_prefetch_Grp16),
9096 /* 0x19 */ IEMOP_X4(iemOp_nop_Ev),
9097 /* 0x1a */ IEMOP_X4(iemOp_nop_Ev),
9098 /* 0x1b */ IEMOP_X4(iemOp_nop_Ev),
9099 /* 0x1c */ IEMOP_X4(iemOp_nop_Ev),
9100 /* 0x1d */ IEMOP_X4(iemOp_nop_Ev),
9101 /* 0x1e */ IEMOP_X4(iemOp_nop_Ev),
9102 /* 0x1f */ IEMOP_X4(iemOp_nop_Ev),
9103
9104 /* 0x20 */ iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd,
9105 /* 0x21 */ iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd,
9106 /* 0x22 */ iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd,
9107 /* 0x23 */ iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd,
9108 /* 0x24 */ iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td,
9109 /* 0x25 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
9110 /* 0x26 */ iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd,
9111 /* 0x27 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
9112 /* 0x28 */ iemOp_movaps_Vps_Wps, iemOp_movapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9113 /* 0x29 */ iemOp_movaps_Wps_Vps, iemOp_movapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9114 /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi, iemOp_cvtpi2pd_Vpd_Qpi, iemOp_cvtsi2ss_Vss_Ey, iemOp_cvtsi2sd_Vsd_Ey,
9115 /* 0x2b */ iemOp_movntps_Mps_Vps, iemOp_movntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9116 /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps, iemOp_cvttpd2pi_Ppi_Wpd, iemOp_cvttss2si_Gy_Wss, iemOp_cvttsd2si_Gy_Wsd,
9117 /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps, iemOp_cvtpd2pi_Qpi_Wpd, iemOp_cvtss2si_Gy_Wss, iemOp_cvtsd2si_Gy_Wsd,
9118 /* 0x2e */ iemOp_ucomiss_Vss_Wss, iemOp_ucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9119 /* 0x2f */ iemOp_comiss_Vss_Wss, iemOp_comisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9120
9121 /* 0x30 */ IEMOP_X4(iemOp_wrmsr),
9122 /* 0x31 */ IEMOP_X4(iemOp_rdtsc),
9123 /* 0x32 */ IEMOP_X4(iemOp_rdmsr),
9124 /* 0x33 */ IEMOP_X4(iemOp_rdpmc),
9125 /* 0x34 */ IEMOP_X4(iemOp_sysenter),
9126 /* 0x35 */ IEMOP_X4(iemOp_sysexit),
9127 /* 0x36 */ IEMOP_X4(iemOp_Invalid),
9128 /* 0x37 */ IEMOP_X4(iemOp_getsec),
9129 /* 0x38 */ IEMOP_X4(iemOp_3byte_Esc_0f_38),
9130 /* 0x39 */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
9131 /* 0x3a */ IEMOP_X4(iemOp_3byte_Esc_0f_3a),
9132 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
9133 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
9134 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
9135 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
9136 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
9137
9138 /* 0x40 */ IEMOP_X4(iemOp_cmovo_Gv_Ev),
9139 /* 0x41 */ IEMOP_X4(iemOp_cmovno_Gv_Ev),
9140 /* 0x42 */ IEMOP_X4(iemOp_cmovc_Gv_Ev),
9141 /* 0x43 */ IEMOP_X4(iemOp_cmovnc_Gv_Ev),
9142 /* 0x44 */ IEMOP_X4(iemOp_cmove_Gv_Ev),
9143 /* 0x45 */ IEMOP_X4(iemOp_cmovne_Gv_Ev),
9144 /* 0x46 */ IEMOP_X4(iemOp_cmovbe_Gv_Ev),
9145 /* 0x47 */ IEMOP_X4(iemOp_cmovnbe_Gv_Ev),
9146 /* 0x48 */ IEMOP_X4(iemOp_cmovs_Gv_Ev),
9147 /* 0x49 */ IEMOP_X4(iemOp_cmovns_Gv_Ev),
9148 /* 0x4a */ IEMOP_X4(iemOp_cmovp_Gv_Ev),
9149 /* 0x4b */ IEMOP_X4(iemOp_cmovnp_Gv_Ev),
9150 /* 0x4c */ IEMOP_X4(iemOp_cmovl_Gv_Ev),
9151 /* 0x4d */ IEMOP_X4(iemOp_cmovnl_Gv_Ev),
9152 /* 0x4e */ IEMOP_X4(iemOp_cmovle_Gv_Ev),
9153 /* 0x4f */ IEMOP_X4(iemOp_cmovnle_Gv_Ev),
9154
9155 /* 0x50 */ iemOp_movmskps_Gy_Ups, iemOp_movmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9156 /* 0x51 */ iemOp_sqrtps_Vps_Wps, iemOp_sqrtpd_Vpd_Wpd, iemOp_sqrtss_Vss_Wss, iemOp_sqrtsd_Vsd_Wsd,
9157 /* 0x52 */ iemOp_rsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rsqrtss_Vss_Wss, iemOp_InvalidNeedRM,
9158 /* 0x53 */ iemOp_rcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rcpss_Vss_Wss, iemOp_InvalidNeedRM,
9159 /* 0x54 */ iemOp_andps_Vps_Wps, iemOp_andpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9160 /* 0x55 */ iemOp_andnps_Vps_Wps, iemOp_andnpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9161 /* 0x56 */ iemOp_orps_Vps_Wps, iemOp_orpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9162 /* 0x57 */ iemOp_xorps_Vps_Wps, iemOp_xorpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9163 /* 0x58 */ iemOp_addps_Vps_Wps, iemOp_addpd_Vpd_Wpd, iemOp_addss_Vss_Wss, iemOp_addsd_Vsd_Wsd,
9164 /* 0x59 */ iemOp_mulps_Vps_Wps, iemOp_mulpd_Vpd_Wpd, iemOp_mulss_Vss_Wss, iemOp_mulsd_Vsd_Wsd,
9165 /* 0x5a */ iemOp_cvtps2pd_Vpd_Wps, iemOp_cvtpd2ps_Vps_Wpd, iemOp_cvtss2sd_Vsd_Wss, iemOp_cvtsd2ss_Vss_Wsd,
9166 /* 0x5b */ iemOp_cvtdq2ps_Vps_Wdq, iemOp_cvtps2dq_Vdq_Wps, iemOp_cvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
9167 /* 0x5c */ iemOp_subps_Vps_Wps, iemOp_subpd_Vpd_Wpd, iemOp_subss_Vss_Wss, iemOp_subsd_Vsd_Wsd,
9168 /* 0x5d */ iemOp_minps_Vps_Wps, iemOp_minpd_Vpd_Wpd, iemOp_minss_Vss_Wss, iemOp_minsd_Vsd_Wsd,
9169 /* 0x5e */ iemOp_divps_Vps_Wps, iemOp_divpd_Vpd_Wpd, iemOp_divss_Vss_Wss, iemOp_divsd_Vsd_Wsd,
9170 /* 0x5f */ iemOp_maxps_Vps_Wps, iemOp_maxpd_Vpd_Wpd, iemOp_maxss_Vss_Wss, iemOp_maxsd_Vsd_Wsd,
9171
9172 /* 0x60 */ iemOp_punpcklbw_Pq_Qd, iemOp_punpcklbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9173 /* 0x61 */ iemOp_punpcklwd_Pq_Qd, iemOp_punpcklwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9174 /* 0x62 */ iemOp_punpckldq_Pq_Qd, iemOp_punpckldq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9175 /* 0x63 */ iemOp_packsswb_Pq_Qq, iemOp_packsswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9176 /* 0x64 */ iemOp_pcmpgtb_Pq_Qq, iemOp_pcmpgtb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9177 /* 0x65 */ iemOp_pcmpgtw_Pq_Qq, iemOp_pcmpgtw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9178 /* 0x66 */ iemOp_pcmpgtd_Pq_Qq, iemOp_pcmpgtd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9179 /* 0x67 */ iemOp_packuswb_Pq_Qq, iemOp_packuswb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9180 /* 0x68 */ iemOp_punpckhbw_Pq_Qd, iemOp_punpckhbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9181 /* 0x69 */ iemOp_punpckhwd_Pq_Qd, iemOp_punpckhwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9182 /* 0x6a */ iemOp_punpckhdq_Pq_Qd, iemOp_punpckhdq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9183 /* 0x6b */ iemOp_packssdw_Pq_Qd, iemOp_packssdw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9184 /* 0x6c */ iemOp_InvalidNeedRM, iemOp_punpcklqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9185 /* 0x6d */ iemOp_InvalidNeedRM, iemOp_punpckhqdq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9186 /* 0x6e */ iemOp_movd_q_Pd_Ey, iemOp_movd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9187 /* 0x6f */ iemOp_movq_Pq_Qq, iemOp_movdqa_Vx_Wx, iemOp_movdqu_Vx_Wx, iemOp_InvalidNeedRM,
9188
9189 /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib, iemOp_pshufd_Vx_Wx_Ib, iemOp_pshufhw_Vx_Wx_Ib, iemOp_pshuflw_Vx_Wx_Ib,
9190 /* 0x71 */ IEMOP_X4(iemOp_Grp12),
9191 /* 0x72 */ IEMOP_X4(iemOp_Grp13),
9192 /* 0x73 */ IEMOP_X4(iemOp_Grp14),
9193 /* 0x74 */ iemOp_pcmpeqb_Pq_Qq, iemOp_pcmpeqb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9194 /* 0x75 */ iemOp_pcmpeqw_Pq_Qq, iemOp_pcmpeqw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9195 /* 0x76 */ iemOp_pcmpeqd_Pq_Qq, iemOp_pcmpeqd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9196 /* 0x77 */ iemOp_emms, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9197
9198 /* 0x78 */ iemOp_vmread_Ey_Gy, iemOp_AmdGrp17, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9199 /* 0x79 */ iemOp_vmwrite_Gy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9200 /* 0x7a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9201 /* 0x7b */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9202 /* 0x7c */ iemOp_InvalidNeedRM, iemOp_haddpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_haddps_Vps_Wps,
9203 /* 0x7d */ iemOp_InvalidNeedRM, iemOp_hsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_hsubps_Vps_Wps,
9204 /* 0x7e */ iemOp_movd_q_Ey_Pd, iemOp_movd_q_Ey_Vy, iemOp_movq_Vq_Wq, iemOp_InvalidNeedRM,
9205 /* 0x7f */ iemOp_movq_Qq_Pq, iemOp_movdqa_Wx_Vx, iemOp_movdqu_Wx_Vx, iemOp_InvalidNeedRM,
9206
9207 /* 0x80 */ IEMOP_X4(iemOp_jo_Jv),
9208 /* 0x81 */ IEMOP_X4(iemOp_jno_Jv),
9209 /* 0x82 */ IEMOP_X4(iemOp_jc_Jv),
9210 /* 0x83 */ IEMOP_X4(iemOp_jnc_Jv),
9211 /* 0x84 */ IEMOP_X4(iemOp_je_Jv),
9212 /* 0x85 */ IEMOP_X4(iemOp_jne_Jv),
9213 /* 0x86 */ IEMOP_X4(iemOp_jbe_Jv),
9214 /* 0x87 */ IEMOP_X4(iemOp_jnbe_Jv),
9215 /* 0x88 */ IEMOP_X4(iemOp_js_Jv),
9216 /* 0x89 */ IEMOP_X4(iemOp_jns_Jv),
9217 /* 0x8a */ IEMOP_X4(iemOp_jp_Jv),
9218 /* 0x8b */ IEMOP_X4(iemOp_jnp_Jv),
9219 /* 0x8c */ IEMOP_X4(iemOp_jl_Jv),
9220 /* 0x8d */ IEMOP_X4(iemOp_jnl_Jv),
9221 /* 0x8e */ IEMOP_X4(iemOp_jle_Jv),
9222 /* 0x8f */ IEMOP_X4(iemOp_jnle_Jv),
9223
9224 /* 0x90 */ IEMOP_X4(iemOp_seto_Eb),
9225 /* 0x91 */ IEMOP_X4(iemOp_setno_Eb),
9226 /* 0x92 */ IEMOP_X4(iemOp_setc_Eb),
9227 /* 0x93 */ IEMOP_X4(iemOp_setnc_Eb),
9228 /* 0x94 */ IEMOP_X4(iemOp_sete_Eb),
9229 /* 0x95 */ IEMOP_X4(iemOp_setne_Eb),
9230 /* 0x96 */ IEMOP_X4(iemOp_setbe_Eb),
9231 /* 0x97 */ IEMOP_X4(iemOp_setnbe_Eb),
9232 /* 0x98 */ IEMOP_X4(iemOp_sets_Eb),
9233 /* 0x99 */ IEMOP_X4(iemOp_setns_Eb),
9234 /* 0x9a */ IEMOP_X4(iemOp_setp_Eb),
9235 /* 0x9b */ IEMOP_X4(iemOp_setnp_Eb),
9236 /* 0x9c */ IEMOP_X4(iemOp_setl_Eb),
9237 /* 0x9d */ IEMOP_X4(iemOp_setnl_Eb),
9238 /* 0x9e */ IEMOP_X4(iemOp_setle_Eb),
9239 /* 0x9f */ IEMOP_X4(iemOp_setnle_Eb),
9240
9241 /* 0xa0 */ IEMOP_X4(iemOp_push_fs),
9242 /* 0xa1 */ IEMOP_X4(iemOp_pop_fs),
9243 /* 0xa2 */ IEMOP_X4(iemOp_cpuid),
9244 /* 0xa3 */ IEMOP_X4(iemOp_bt_Ev_Gv),
9245 /* 0xa4 */ IEMOP_X4(iemOp_shld_Ev_Gv_Ib),
9246 /* 0xa5 */ IEMOP_X4(iemOp_shld_Ev_Gv_CL),
9247 /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
9248 /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
9249 /* 0xa8 */ IEMOP_X4(iemOp_push_gs),
9250 /* 0xa9 */ IEMOP_X4(iemOp_pop_gs),
9251 /* 0xaa */ IEMOP_X4(iemOp_rsm),
9252 /* 0xab */ IEMOP_X4(iemOp_bts_Ev_Gv),
9253 /* 0xac */ IEMOP_X4(iemOp_shrd_Ev_Gv_Ib),
9254 /* 0xad */ IEMOP_X4(iemOp_shrd_Ev_Gv_CL),
9255 /* 0xae */ IEMOP_X4(iemOp_Grp15),
9256 /* 0xaf */ IEMOP_X4(iemOp_imul_Gv_Ev),
9257
9258 /* 0xb0 */ IEMOP_X4(iemOp_cmpxchg_Eb_Gb),
9259 /* 0xb1 */ IEMOP_X4(iemOp_cmpxchg_Ev_Gv),
9260 /* 0xb2 */ IEMOP_X4(iemOp_lss_Gv_Mp),
9261 /* 0xb3 */ IEMOP_X4(iemOp_btr_Ev_Gv),
9262 /* 0xb4 */ IEMOP_X4(iemOp_lfs_Gv_Mp),
9263 /* 0xb5 */ IEMOP_X4(iemOp_lgs_Gv_Mp),
9264 /* 0xb6 */ IEMOP_X4(iemOp_movzx_Gv_Eb),
9265 /* 0xb7 */ IEMOP_X4(iemOp_movzx_Gv_Ew),
9266 /* 0xb8 */ iemOp_jmpe, iemOp_InvalidNeedRM, iemOp_popcnt_Gv_Ev, iemOp_InvalidNeedRM,
9267 /* 0xb9 */ IEMOP_X4(iemOp_Grp10),
9268 /* 0xba */ IEMOP_X4(iemOp_Grp8),
9269 /* 0xbb */ IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3?
9270 /* 0xbc */ iemOp_bsf_Gv_Ev, iemOp_bsf_Gv_Ev, iemOp_tzcnt_Gv_Ev, iemOp_bsf_Gv_Ev,
9271 /* 0xbd */ iemOp_bsr_Gv_Ev, iemOp_bsr_Gv_Ev, iemOp_lzcnt_Gv_Ev, iemOp_bsr_Gv_Ev,
9272 /* 0xbe */ IEMOP_X4(iemOp_movsx_Gv_Eb),
9273 /* 0xbf */ IEMOP_X4(iemOp_movsx_Gv_Ew),
9274
9275 /* 0xc0 */ IEMOP_X4(iemOp_xadd_Eb_Gb),
9276 /* 0xc1 */ IEMOP_X4(iemOp_xadd_Ev_Gv),
9277 /* 0xc2 */ iemOp_cmpps_Vps_Wps_Ib, iemOp_cmppd_Vpd_Wpd_Ib, iemOp_cmpss_Vss_Wss_Ib, iemOp_cmpsd_Vsd_Wsd_Ib,
9278 /* 0xc3 */ iemOp_movnti_My_Gy, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9279 /* 0xc4 */ iemOp_pinsrw_Pq_RyMw_Ib, iemOp_pinsrw_Vdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
9280 /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib, iemOp_pextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
9281 /* 0xc6 */ iemOp_shufps_Vps_Wps_Ib, iemOp_shufpd_Vpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
9282 /* 0xc7 */ IEMOP_X4(iemOp_Grp9),
9283 /* 0xc8 */ IEMOP_X4(iemOp_bswap_rAX_r8),
9284 /* 0xc9 */ IEMOP_X4(iemOp_bswap_rCX_r9),
9285 /* 0xca */ IEMOP_X4(iemOp_bswap_rDX_r10),
9286 /* 0xcb */ IEMOP_X4(iemOp_bswap_rBX_r11),
9287 /* 0xcc */ IEMOP_X4(iemOp_bswap_rSP_r12),
9288 /* 0xcd */ IEMOP_X4(iemOp_bswap_rBP_r13),
9289 /* 0xce */ IEMOP_X4(iemOp_bswap_rSI_r14),
9290 /* 0xcf */ IEMOP_X4(iemOp_bswap_rDI_r15),
9291
9292 /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_addsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_addsubps_Vps_Wps,
9293 /* 0xd1 */ iemOp_psrlw_Pq_Qq, iemOp_psrlw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9294 /* 0xd2 */ iemOp_psrld_Pq_Qq, iemOp_psrld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9295 /* 0xd3 */ iemOp_psrlq_Pq_Qq, iemOp_psrlq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9296 /* 0xd4 */ iemOp_paddq_Pq_Qq, iemOp_paddq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9297 /* 0xd5 */ iemOp_pmullw_Pq_Qq, iemOp_pmullw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9298 /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_movq_Wq_Vq, iemOp_movq2dq_Vdq_Nq, iemOp_movdq2q_Pq_Uq,
9299 /* 0xd7 */ iemOp_pmovmskb_Gd_Nq, iemOp_pmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9300    /* 0xd8 */  iemOp_psubusb_Pq_Qq,        iemOp_psubusb_Vx_Wx,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
9301 /* 0xd9 */ iemOp_psubusw_Pq_Qq, iemOp_psubusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9302 /* 0xda */ iemOp_pminub_Pq_Qq, iemOp_pminub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9303    /* 0xdb */  iemOp_pand_Pq_Qq,           iemOp_pand_Vx_Wx,           iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
9304 /* 0xdc */ iemOp_paddusb_Pq_Qq, iemOp_paddusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9305 /* 0xdd */ iemOp_paddusw_Pq_Qq, iemOp_paddusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9306    /* 0xde */  iemOp_pmaxub_Pq_Qq,         iemOp_pmaxub_Vx_Wx,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
9307 /* 0xdf */ iemOp_pandn_Pq_Qq, iemOp_pandn_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9308
9309 /* 0xe0 */ iemOp_pavgb_Pq_Qq, iemOp_pavgb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9310    /* 0xe1 */  iemOp_psraw_Pq_Qq,          iemOp_psraw_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
9311 /* 0xe2 */ iemOp_psrad_Pq_Qq, iemOp_psrad_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9312 /* 0xe3 */ iemOp_pavgw_Pq_Qq, iemOp_pavgw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9313    /* 0xe4 */  iemOp_pmulhuw_Pq_Qq,        iemOp_pmulhuw_Vx_Wx,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
9314 /* 0xe5 */ iemOp_pmulhw_Pq_Qq, iemOp_pmulhw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9315 /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_cvttpd2dq_Vx_Wpd, iemOp_cvtdq2pd_Vx_Wpd, iemOp_cvtpd2dq_Vx_Wpd,
9316 /* 0xe7 */ iemOp_movntq_Mq_Pq, iemOp_movntdq_Mx_Vx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9317    /* 0xe8 */  iemOp_psubsb_Pq_Qq,         iemOp_psubsb_Vx_Wx,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
9318 /* 0xe9 */ iemOp_psubsw_Pq_Qq, iemOp_psubsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9319 /* 0xea */ iemOp_pminsw_Pq_Qq, iemOp_pminsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9320    /* 0xeb */  iemOp_por_Pq_Qq,            iemOp_por_Vx_Wx,            iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
9321 /* 0xec */ iemOp_paddsb_Pq_Qq, iemOp_paddsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9322 /* 0xed */ iemOp_paddsw_Pq_Qq, iemOp_paddsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9323    /* 0xee */  iemOp_pmaxsw_Pq_Qq,         iemOp_pmaxsw_Vx_Wx,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
9324 /* 0xef */ iemOp_pxor_Pq_Qq, iemOp_pxor_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9325
9326 /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_lddqu_Vx_Mx,
9327    /* 0xf1 */  iemOp_psllw_Pq_Qq,          iemOp_psllw_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
9328 /* 0xf2 */ iemOp_pslld_Pq_Qq, iemOp_pslld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9329 /* 0xf3 */ iemOp_psllq_Pq_Qq, iemOp_psllq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9330    /* 0xf4 */  iemOp_pmuludq_Pq_Qq,        iemOp_pmuludq_Vx_Wx,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
9331 /* 0xf5 */ iemOp_pmaddwd_Pq_Qq, iemOp_pmaddwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9332 /* 0xf6 */ iemOp_psadbw_Pq_Qq, iemOp_psadbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9333 /* 0xf7 */ iemOp_maskmovq_Pq_Nq, iemOp_maskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9334    /* 0xf8 */  iemOp_psubb_Pq_Qq,          iemOp_psubb_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
9335 /* 0xf9 */ iemOp_psubw_Pq_Qq, iemOp_psubw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9336 /* 0xfa */ iemOp_psubd_Pq_Qq, iemOp_psubd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9337    /* 0xfb */  iemOp_psubq_Pq_Qq,          iemOp_psubq_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
9338 /* 0xfc */ iemOp_paddb_Pq_Qq, iemOp_paddb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9339 /* 0xfd */ iemOp_paddw_Pq_Qq, iemOp_paddw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9340    /* 0xfe */  iemOp_paddd_Pq_Qq,          iemOp_paddd_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
9341 /* 0xff */ IEMOP_X4(iemOp_ud0),
9342};
9343AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
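
/*
 * Editor's note: rows built with IEMOP_X4 replicate a single handler across
 * all four prefix columns, which is how 256 opcodes yield the 1024 entries
 * asserted above.  Below is a sketch of how a dispatcher would pick an entry,
 * assuming the decoder tracks a 0..3 prefix index (here called idxPrefix)
 * ordered none/066h/0f3h/0f2h as documented at the top of the table; it is
 * illustrative only, the real dispatch code lives elsewhere.
 */
#if 0 /* illustrative sketch, not built */
FNIEMOP_DEF(iemOpSketch_2byteEscape)
{
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    /* Four entries per opcode byte: index = opcode * 4 + prefix column. */
    return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
}
#endif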
9344
9345/** @} */
9346