VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsTwoByte0f.cpp.h@67006

Last change on this file since 67006 was 67006, checked in by vboxsync, 8 years ago

IEM: Implemented vmovd Vd,Ed and vmovq Vq,Eq (VEX.66.0F 6e).

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 326.8 KB
/* $Id: IEMAllInstructionsTwoByte0f.cpp.h 67006 2017-05-22 11:36:46Z vboxsync $ */
/** @file
 * IEM - Instruction Decoding and Emulation.
 *
 * @remarks IEMAllInstructionsVexMap1.cpp.h is a VEX mirror of this file.
 *          Any update here is likely needed in that file too.
 */

/*
 * Copyright (C) 2011-2017 Oracle Corporation
 *
 * This file is part of VirtualBox Open Source Edition (OSE), as
 * available from http://www.virtualbox.org. This file is free software;
 * you can redistribute it and/or modify it under the terms of the GNU
 * General Public License (GPL) as published by the Free Software
 * Foundation, in version 2 as it comes in the "COPYING" file of the
 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
 */


/** @name Two byte opcodes (first byte 0x0f).
 *
 * @{
 */

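/*
 * A note on the decoding pattern used throughout this file (an informal
 * summary deduced from the macro usage below, not an authoritative
 * reference): a ModR/M byte bRm is laid out as mod(7:6), reg(5:3), rm(2:0).
 * The test
 *      (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)
 * checks for mod == 3, i.e. the register form; everything else is a memory
 * form.  For example, bRm == 0xd8 decodes as mod=3, reg=3, rm=0.  The
 * uRexReg/uRexB members appear to hold the REX.R/REX.B bits already shifted
 * into bit 3, so OR-ing them into the 3-bit reg/rm fields yields register
 * indices 0..15 in 64-bit mode.
 */
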
/** Opcode 0x0f 0x00 /0. */
FNIEMOPRM_DEF(iemOp_Grp6_sldt)
{
    IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_LDTR_READS, SVM_EXIT_LDTR_READ, 0, 0);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Ldtr);
                IEM_MC_FETCH_LDTR_U16(u16Ldtr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Ldtr);
                IEM_MC_FETCH_LDTR_U32(u32Ldtr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Ldtr);
                IEM_MC_FETCH_LDTR_U64(u64Ldtr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Ldtr);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_LDTR_READS, SVM_EXIT_LDTR_READ, 0, 0);
        IEM_MC_FETCH_LDTR_U16(u16Ldtr);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Ldtr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /1. */
FNIEMOPRM_DEF(iemOp_Grp6_str)
{
    IEMOP_MNEMONIC(str, "str Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_TR_READS, SVM_EXIT_TR_READ, 0, 0);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Tr);
                IEM_MC_FETCH_TR_U16(u16Tr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Tr);
                IEM_MC_FETCH_TR_U32(u32Tr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tr);
                IEM_MC_FETCH_TR_U64(u64Tr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Tr);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_TR_READS, SVM_EXIT_TR_READ, 0, 0);
        IEM_MC_FETCH_TR_U16(u16Tr);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /2. */
FNIEMOPRM_DEF(iemOp_Grp6_lldt)
{
    IEMOP_MNEMONIC(lldt, "lldt Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /3. */
FNIEMOPRM_DEF(iemOp_Grp6_ltr)
{
    IEMOP_MNEMONIC(ltr, "ltr Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Common worker for verr (0x0f 0x00 /4) and verw (0x0f 0x00 /5). */
FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
{
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /4. */
FNIEMOPRM_DEF(iemOp_Grp6_verr)
{
    IEMOP_MNEMONIC(verr, "verr Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
}


/** Opcode 0x0f 0x00 /5. */
FNIEMOPRM_DEF(iemOp_Grp6_verw)
{
    IEMOP_MNEMONIC(verw, "verw Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
}


/**
 * Group 6 jump table.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
{
    iemOp_Grp6_sldt,
    iemOp_Grp6_str,
    iemOp_Grp6_lldt,
    iemOp_Grp6_ltr,
    iemOp_Grp6_verr,
    iemOp_Grp6_verw,
    iemOp_InvalidWithRM,
    iemOp_InvalidWithRM
};

/** Opcode 0x0f 0x00. */
FNIEMOP_DEF(iemOp_Grp6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    return FNIEMOP_CALL_1(g_apfnGroup6[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);
}
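
/*
 * Dispatch example for the group 6 decoder above (illustrative, assuming
 * X86_MODRM_REG_SHIFT == 3 and X86_MODRM_REG_SMASK == 7, the usual ModR/M
 * field layout): for the byte sequence 0f 00 d8, bRm is 0xd8, so
 * (bRm >> 3) & 7 == 3 and the call lands on g_apfnGroup6[3], i.e.
 * iemOp_Grp6_ltr.
 */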


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sgdt, "sgdt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sidt, "sidt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    IEMOP_MNEMONIC(monitor, "monitor");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
}


/** Opcode 0x0f 0x01 /2. */
FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lgdt, "lgdt");
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 0xd0. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    IEMOP_MNEMONIC(xgetbv, "xgetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xgetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 0xd1. */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    IEMOP_MNEMONIC(xsetbv, "xsetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xsetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /3. */
FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lidt, "lidt");
    IEMMODE enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
                         ? IEMMODE_64BIT
                         : pVCpu->iem.s.enmEffOpSize;
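    /* In 64-bit mode lidt (like lgdt) always operates on a 2+8 byte operand
       and ignores the operand-size prefix, which is why the effective
       operand size is forced to 64-bit here; the IEMOP_HLP_64BIT_OP_SIZE()
       helper used by lgdt above is assumed to have the same effect. */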
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}


#ifdef VBOX_WITH_NESTED_HWVIRT
/** Opcode 0x0f 0x01 0xd8. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmrun)
{
    IEMOP_MNEMONIC(vmrun, "vmrun");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmrun);
}

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmmcall)
{
    IEMOP_MNEMONIC(vmmcall, "vmmcall");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmmcall);
}


/** Opcode 0x0f 0x01 0xda. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmload)
{
    IEMOP_MNEMONIC(vmload, "vmload");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmload);
}


/** Opcode 0x0f 0x01 0xdb. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmsave)
{
    IEMOP_MNEMONIC(vmsave, "vmsave");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmsave);
}


/** Opcode 0x0f 0x01 0xdc. */
FNIEMOP_DEF(iemOp_Grp7_Amd_stgi)
{
    IEMOP_MNEMONIC(stgi, "stgi");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stgi);
}


/** Opcode 0x0f 0x01 0xdd. */
FNIEMOP_DEF(iemOp_Grp7_Amd_clgi)
{
    IEMOP_MNEMONIC(clgi, "clgi");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clgi);
}


/** Opcode 0x0f 0x01 0xdf. */
FNIEMOP_DEF(iemOp_Grp7_Amd_invlpga)
{
    IEMOP_MNEMONIC(invlpga, "invlpga");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_invlpga);
}


/** Opcode 0x0f 0x01 0xde. */
FNIEMOP_DEF(iemOp_Grp7_Amd_skinit)
{
    IEMOP_MNEMONIC(skinit, "skinit");
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_skinit);
}
#else
/** Opcode 0x0f 0x01 0xd8. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmmcall);

/** Opcode 0x0f 0x01 0xda. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);

/** Opcode 0x0f 0x01 0xdb. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);

/** Opcode 0x0f 0x01 0xdc. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);

/** Opcode 0x0f 0x01 0xdd. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);

/** Opcode 0x0f 0x01 0xdf. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);

/** Opcode 0x0f 0x01 0xde. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);
#endif /* VBOX_WITH_NESTED_HWVIRT */

/** Opcode 0x0f 0x01 /4. */
FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(smsw, "smsw");
    IEMOP_HLP_MIN_286();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEMOP_HLP_SVM_READ_CR_INTERCEPT(pVCpu, /*cr*/ 0, 0 /* uExitInfo1 */, 0 /* uExitInfo2 */);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_FETCH_CR0_U16(u16Tmp);
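                /* Quirk emulated below (a hedged summary of the OR masks):
                   on a 286 the undefined MSW bits 4..15 read as ones (hence
                   0xfff0), while the 386 implements ET (bit 4), leaving only
                   bits 5..15 to read as ones (0xffe0); later CPUs return the
                   CR0 bits unmodified. */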
                if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
                { /* likely */ }
                else if (IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_386)
                    IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
                else
                    IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_FETCH_CR0_U32(u32Tmp);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_FETCH_CR0_U64(u64Tmp);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Ignore operand size here, memory refs are always 16-bit. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEMOP_HLP_SVM_READ_CR_INTERCEPT(pVCpu, /*cr*/ 0, 0 /* uExitInfo1 */, 0 /* uExitInfo2 */);
        IEM_MC_FETCH_CR0_U16(u16Tmp);
        if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
        { /* likely */ }
        else if (pVCpu->iem.s.uTargetCpu >= IEMTARGETCPU_386)
            IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
        else
            IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }
}


/** Opcode 0x0f 0x01 /6. */
FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
{
    /* The operand size is effectively ignored, all is 16-bit and only the
       lower four bits (PE, MP, EM and TS) can actually be modified. */
    IEMOP_MNEMONIC(lmsw, "lmsw");
    IEMOP_HLP_MIN_286();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
{
    IEMOP_MNEMONIC(invlpg, "invlpg");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(1, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF(iemOp_Grp7_swapgs)
{
    IEMOP_MNEMONIC(swapgs, "swapgs");
    IEMOP_HLP_ONLY_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF(iemOp_Grp7_rdtscp)
{
    IEMOP_MNEMONIC(rdtscp, "rdtscp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /** @todo SVM intercept removal from here. */
    IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_RDTSCP, SVM_EXIT_RDTSCP, 0, 0);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtscp);
}


/**
 * Group 7 jump table, memory variant.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup7Mem[8] =
{
    iemOp_Grp7_sgdt,
    iemOp_Grp7_sidt,
    iemOp_Grp7_lgdt,
    iemOp_Grp7_lidt,
    iemOp_Grp7_smsw,
    iemOp_InvalidWithRM,
    iemOp_Grp7_lmsw,
    iemOp_Grp7_invlpg
};


/** Opcode 0x0f 0x01. */
FNIEMOP_DEF(iemOp_Grp7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(g_apfnGroup7Mem[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);

    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 1:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 2:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 3:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
                case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
                case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
                case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }

        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);

        case 5:
            return IEMOP_RAISE_INVALID_OPCODE();

        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);

        case 7:
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
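
/*
 * Decode example for the group 7 dispatcher above (illustrative, assuming
 * the usual mod(7:6)/reg(5:3)/rm(2:0) ModR/M layout): memory forms go via
 * g_apfnGroup7Mem indexed by reg, while mod==3 forms are resolved by reg
 * and rm together.  E.g. 0f 01 f8 gives bRm=0xf8 -> mod=3, reg=7, rm=0,
 * i.e. swapgs, whereas 0f 01 10 gives mod=0, reg=2, i.e. the lgdt memory
 * form.
 */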

/** Common worker for LAR (0x0f 0x02) and LSL (0x0f 0x03). */
FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
{
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
/** @todo testcase: make sure it's a 16-bit read. */

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}



/** Opcode 0x0f 0x02. */
FNIEMOP_DEF(iemOp_lar_Gv_Ew)
{
    IEMOP_MNEMONIC(lar, "lar Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
}


/** Opcode 0x0f 0x03. */
FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
{
    IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
}


/** Opcode 0x0f 0x05. */
FNIEMOP_DEF(iemOp_syscall)
{
    IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
}


/** Opcode 0x0f 0x06. */
FNIEMOP_DEF(iemOp_clts)
{
    IEMOP_MNEMONIC(clts, "clts");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
}


/** Opcode 0x0f 0x07. */
FNIEMOP_DEF(iemOp_sysret)
{
    IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
}


/** Opcode 0x0f 0x08. */
FNIEMOP_DEF(iemOp_invd)
{
    IEMOP_MNEMONIC(invd, "invd");
    IEMOP_HLP_MIN_486();
#ifdef VBOX_WITH_NESTED_HWVIRT
    IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO();
    IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_INVD, SVM_EXIT_INVD, 0, 0);
#else
    RT_NOREF_PV(pVCpu);
#endif
    /** @todo implement invd for the regular case (above only handles nested SVM
     *        exits). */
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}


/** Opcode 0x0f 0x09. */
FNIEMOP_DEF(iemOp_wbinvd)
{
    IEMOP_MNEMONIC(wbinvd, "wbinvd");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO();
    IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_WBINVD, SVM_EXIT_WBINVD, 0, 0);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS; /* ignore for now */
}


/** Opcode 0x0f 0x0b. */
FNIEMOP_DEF(iemOp_ud2)
{
    IEMOP_MNEMONIC(ud2, "ud2");
    return IEMOP_RAISE_INVALID_OPCODE();
}

/** Opcode 0x0f 0x0d. */
FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
{
    /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
    {
        IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 2: /* Aliased to /0 for the time being. */
        case 4: /* Aliased to /0 for the time being. */
        case 5: /* Aliased to /0 for the time being. */
        case 6: /* Aliased to /0 for the time being. */
        case 7: /* Aliased to /0 for the time being. */
        case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
        case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
        case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Currently a NOP. */
    NOREF(GCPtrEffSrc);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x0e. */
FNIEMOP_STUB(iemOp_femms);


/** Opcode 0x0f 0x0f. */
FNIEMOP_DEF(iemOp_3Dnow)
{
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
    {
        IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

#ifdef IEM_WITH_3DNOW
    /* This is pretty sparse, use switch instead of table. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL_1(iemOp_3DNowDispatcher, b);
#else
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif
}


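/*
 * Reader's note for the SSE move blocks that follow (an informal summary
 * deduced from usage, not from the macro definitions): the @optest lines
 * feed a testcase generator, e.g. "op1=1 op2=2 -> op1=2" means that with
 * the destination preloaded with 1 and the source with 2, the destination
 * must hold 2 afterwards.  IEM_MC_BEGIN(cArgs, cLocals) appears to open a
 * micro-op block with the given number of call arguments and locals, and
 * IEM_MC_END() closes it again.
 */
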
/**
 * @opcode      0x10
 * @oppfx       none
 * @opcpuid     sse
 * @opgroup     og_sse_simdfp_datamove
 * @opxcpttype  4UA
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_movups_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, MOVUPS, movups, Vps_WO, Wps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                              (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode      0x10
 * @oppfx       0x66
 * @opcpuid     sse2
 * @opgroup     og_sse2_pcksclr_datamove
 * @opxcpttype  4UA
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movupd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, MOVUPD, movupd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                              (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode      0x10
 * @oppfx       0xf3
 * @opcpuid     sse
 * @opgroup     og_sse_simdfp_datamove
 * @opxcpttype  5
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_movss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, MOVSS, movss, VssZx_WO, Wss, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint32_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U32(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_XREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode      0x10
 * @oppfx       0xf2
 * @opcpuid     sse2
 * @opgroup     og_sse2_pcksclr_datamove
 * @opxcpttype  5
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movsd_Vsd_Wsd)
{
    IEMOP_MNEMONIC2(RM, MOVSD, movsd, VsdZx_WO, Wsd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


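/*
 * Note on the scalar moves above: the register-to-register forms of movss
 * and movsd merge into the destination (IEM_MC_STORE_XREG_U32/U64 leave
 * the remaining bytes alone), whereas the memory load forms zero-extend
 * through the whole 128-bit register (the _ZX_U128 variants).  This
 * matches the architectural behaviour of the legacy, non-VEX encodings.
 */
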
/**
 * @opcode      0x11
 * @oppfx       none
 * @opcpuid     sse
 * @opgroup     og_sse_simdfp_datamove
 * @opxcpttype  4UA
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movups_Wps_Vps)
{
    IEMOP_MNEMONIC2(MR, MOVUPS, movups, Wps_WO, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode      0x11
 * @oppfx       0x66
 * @opcpuid     sse2
 * @opgroup     og_sse2_pcksclr_datamove
 * @opxcpttype  4UA
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movupd_Wpd_Vpd)
{
    IEMOP_MNEMONIC2(MR, MOVUPD, movupd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode      0x11
 * @oppfx       0xf3
 * @opcpuid     sse
 * @opgroup     og_sse_simdfp_datamove
 * @opxcpttype  5
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_movss_Wss_Vss)
{
    IEMOP_MNEMONIC2(MR, MOVSS, movss, Wss_WO, Vss, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint32_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U32(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_XREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U32(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode      0x11
 * @oppfx       0xf2
 * @opcpuid     sse2
 * @opgroup     og_sse2_pcksclr_datamove
 * @opxcpttype  5
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movsd_Wsd_Vsd)
{
    IEMOP_MNEMONIC2(MR, MOVSD, movsd, Wsd_WO, Vsd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


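/*
 * The next function decodes two instructions sharing opcode 0x12: with
 * mod==3 it is MOVHLPS (the low qword of the destination register is
 * loaded from the high qword of the source register), otherwise it is the
 * MOVLPS load form (low qword loaded from memory).  The doc comments for
 * each variant live inside the respective branch below.
 */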
FNIEMOP_DEF(iemOp_movlps_Vq_Mq__movhlps)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /**
         * @opcode      0x12
         * @opcodesub   11 mr/reg
         * @oppfx       none
         * @opcpuid     sse
         * @opgroup     og_sse_simdfp_datamove
         * @opxcpttype  5
         * @optest      op1=1 op2=2 -> op1=2
         * @optest      op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(RM_REG, MOVHLPS, movhlps, Vq_WO, UqHi, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);

        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_HI_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode      0x12
         * @opcodesub   !11 mr/reg
         * @oppfx       none
         * @opcpuid     sse
         * @opgroup     og_sse_simdfp_datamove
         * @opxcpttype  5
         * @optest      op1=1 op2=2 -> op1=2
         * @optest      op1=0 op2=-42 -> op1=-42
         * @opfunction  iemOp_movlps_Vq_Mq__movhlps
         */
        IEMOP_MNEMONIC2(RM_MEM, MOVLPS, movlps, Vq_WO, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode      0x12
 * @opcodesub   !11 mr/reg
 * @oppfx       0x66
 * @opcpuid     sse2
 * @opgroup     og_sse2_pcksclr_datamove
 * @opxcpttype  5
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movlpd_Vq_Mq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC2(RM_MEM, MOVLPD, movlpd, Vq_WO, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic  ud660f12m3
     * @opcode      0x12
     * @opcodesub   11 mr/reg
     * @oppfx       0x66
     * @opunused    immediate
     * @opcpuid     sse
     * @optest      ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}


/**
 * @opcode      0x12
 * @oppfx       0xf3
 * @opcpuid     sse3
 * @opgroup     og_sse3_pcksclr_datamove
 * @opxcpttype  4
 * @optest      op1=-1 op2=0xdddddddd00000002eeeeeeee00000001 ->
 *              op1=0x00000002000000020000000100000001
 */
FNIEMOP_DEF(iemOp_movsldup_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, MOVSLDUP, movsldup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);

        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_REF_XREG_U128_CONST(puSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

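/*
 * Memory-form pattern used by movsldup above and movshdup below (a hedged
 * reading of the macros, not an authoritative one):
 * IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1) seems to declare call
 * argument #1 as a pointer bound to the local uSrc, so the aligned 128-bit
 * memory fetch lands in uSrc and the assembly helper receives a pointer to
 * it, mirroring the register form where puSrc references the XMM register
 * directly.
 */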

/**
 * @opcode      0x12
 * @oppfx       0xf2
 * @opcpuid     sse3
 * @opgroup     og_sse3_pcksclr_datamove
 * @opxcpttype  5
 * @optest      op1=-1 op2=0xddddddddeeeeeeee2222222211111111 ->
 *              op1=0x22222222111111112222222211111111
 */
FNIEMOP_DEF(iemOp_movddup_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, MOVDDUP, movddup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(uint64_t, uSrc, 1);

        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movddup, puDst, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(uint64_t, uSrc, 1);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movddup, puDst, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode      0x13
 * @opcodesub   !11 mr/reg
 * @oppfx       none
 * @opcpuid     sse
 * @opgroup     og_sse_simdfp_datamove
 * @opxcpttype  5
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movlps_Mq_Vq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC2(MR_MEM, MOVLPS, movlps, Mq_WO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic  ud0f13m3
     * @opcode      0x13
     * @opcodesub   11 mr/reg
     * @oppfx       none
     * @opunused    immediate
     * @opcpuid     sse
     * @optest      ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}


/**
 * @opcode      0x13
 * @opcodesub   !11 mr/reg
 * @oppfx       0x66
 * @opcpuid     sse2
 * @opgroup     og_sse2_pcksclr_datamove
 * @opxcpttype  5
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movlpd_Mq_Vq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC2(MR_MEM, MOVLPD, movlpd, Mq_WO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic  ud660f13m3
     * @opcode      0x13
     * @opcodesub   11 mr/reg
     * @oppfx       0x66
     * @opunused    immediate
     * @opcpuid     sse
     * @optest      ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}


/**
 * @opmnemonic  udf30f13
 * @opcode      0x13
 * @oppfx       0xf3
 * @opunused    intel-modrm
 * @opcpuid     sse
 * @optest      ->
 * @opdone
 */

/**
 * @opmnemonic  udf20f13
 * @opcode      0x13
 * @oppfx       0xf2
 * @opunused    intel-modrm
 * @opcpuid     sse
 * @optest      ->
 * @opdone
 */

/** Opcode 0x0f 0x14 - unpcklps Vx, Wx */
FNIEMOP_STUB(iemOp_unpcklps_Vx_Wx);
/** Opcode 0x66 0x0f 0x14 - unpcklpd Vx, Wx */
FNIEMOP_STUB(iemOp_unpcklpd_Vx_Wx);

/**
 * @opdone
 * @opmnemonic  udf30f14
 * @opcode      0x14
 * @oppfx       0xf3
 * @opunused    intel-modrm
 * @opcpuid     sse
 * @optest      ->
 * @opdone
 */

/**
 * @opmnemonic  udf20f14
 * @opcode      0x14
 * @oppfx       0xf2
 * @opunused    intel-modrm
 * @opcpuid     sse
 * @optest      ->
 * @opdone
 */

/** Opcode 0x0f 0x15 - unpckhps Vx, Wx */
FNIEMOP_STUB(iemOp_unpckhps_Vx_Wx);
/** Opcode 0x66 0x0f 0x15 - unpckhpd Vx, Wx */
FNIEMOP_STUB(iemOp_unpckhpd_Vx_Wx);
/* Opcode 0xf3 0x0f 0x15 - invalid */
/* Opcode 0xf2 0x0f 0x15 - invalid */

/**
 * @opdone
 * @opmnemonic  udf30f15
 * @opcode      0x15
 * @oppfx       0xf3
 * @opunused    intel-modrm
 * @opcpuid     sse
 * @optest      ->
 * @opdone
 */

/**
 * @opmnemonic  udf20f15
 * @opcode      0x15
 * @oppfx       0xf2
 * @opunused    intel-modrm
 * @opcpuid     sse
 * @optest      ->
 * @opdone
 */

FNIEMOP_DEF(iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /**
         * @opcode      0x16
         * @opcodesub   11 mr/reg
         * @oppfx       none
         * @opcpuid     sse
         * @opgroup     og_sse_simdfp_datamove
         * @opxcpttype  5
         * @optest      op1=1 op2=2 -> op1=2
         * @optest      op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(RM_REG, MOVLHPS, movlhps, VqHi_WO, Uq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);

        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_XREG_HI_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode      0x16
         * @opcodesub   !11 mr/reg
         * @oppfx       none
         * @opcpuid     sse
         * @opgroup     og_sse_simdfp_datamove
         * @opxcpttype  5
         * @optest      op1=1 op2=2 -> op1=2
         * @optest      op1=0 op2=-42 -> op1=-42
         * @opfunction  iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq
         */
        IEMOP_MNEMONIC2(RM_MEM, MOVHPS, movhps, VqHi_WO, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_HI_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode      0x16
 * @opcodesub   !11 mr/reg
 * @oppfx       0x66
 * @opcpuid     sse2
 * @opgroup     og_sse2_pcksclr_datamove
 * @opxcpttype  5
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movhpd_Vdq_Mq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC2(RM_MEM, MOVHPD, movhpd, VqHi_WO, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_HI_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic  ud660f16m3
     * @opcode      0x16
     * @opcodesub   11 mr/reg
     * @oppfx       0x66
     * @opunused    immediate
     * @opcpuid     sse
     * @optest      ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}


/**
 * @opcode      0x16
 * @oppfx       0xf3
 * @opcpuid     sse3
 * @opgroup     og_sse3_pcksclr_datamove
 * @opxcpttype  4
 * @optest      op1=-1 op2=0x00000002dddddddd00000001eeeeeeee ->
 *              op1=0x00000002000000020000000100000001
 */
FNIEMOP_DEF(iemOp_movshdup_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, MOVSHDUP, movshdup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);

        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_REF_XREG_U128_CONST(puSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movshdup, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movshdup, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

1983/**
1984 * @opdone
1985 * @opmnemonic udf30f16
1986 * @opcode 0x16
1987 * @oppfx 0xf2
1988 * @opunused intel-modrm
1989 * @opcpuid sse
1990 * @optest ->
1991 * @opdone
1992 */
1993
1994
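/*
 * What movshdup computes, as a plain C sketch (illustrative only; the real
 * work happens in iemAImpl_movshdup on the 128-bit union):
 *
 *      uint32_t au32Src[4], au32Dst[4];    hypothetical dword views
 *      au32Dst[0] = au32Dst[1] = au32Src[1];
 *      au32Dst[2] = au32Dst[3] = au32Src[3];
 *
 * i.e. each odd (high) dword of the source is duplicated into a dword pair,
 * which is exactly what the @optest values above show.
 */
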
/**
 * @opcode 0x17
 * @opcodesub !11 mr/reg
 * @oppfx none
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movhps_Mq_Vq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC2(MR_MEM, MOVHPS, movhps, Mq_WO, VqHi, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_HI_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic ud0f17m3
     * @opcode 0x17
     * @opcodesub 11 mr/reg
     * @oppfx none
     * @opunused immediate
     * @opcpuid sse
     * @optest ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}


/**
 * @opcode 0x17
 * @opcodesub !11 mr/reg
 * @oppfx 0x66
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movhpd_Mq_Vq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC2(MR_MEM, MOVHPD, movhpd, Mq_WO, VqHi, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_HI_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic ud660f17m3
     * @opcode 0x17
     * @opcodesub 11 mr/reg
     * @oppfx 0x66
     * @opunused immediate
     * @opcpuid sse
     * @optest ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}


/**
 * @opdone
 * @opmnemonic udf30f17
 * @opcode 0x17
 * @oppfx 0xf3
 * @opunused intel-modrm
 * @opcpuid sse
 * @optest ->
 * @opdone
 */

/**
 * @opmnemonic udf20f17
 * @opcode 0x17
 * @oppfx 0xf2
 * @opunused intel-modrm
 * @opcpuid sse
 * @optest ->
 * @opdone
 */


/** Opcode 0x0f 0x18. */
FNIEMOP_DEF(iemOp_prefetch_Grp16)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 4: /* Aliased to /0 for the time being according to AMD. */
            case 5: /* Aliased to /0 for the time being according to AMD. */
            case 6: /* Aliased to /0 for the time being according to AMD. */
            case 7: /* Aliased to /0 for the time being according to AMD. */
            case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
            case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0 m8"); break;
            case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1 m8"); break;
            case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2 m8"); break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }

        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Currently a NOP. */
        NOREF(GCPtrEffSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x19..0x1f. */
FNIEMOP_DEF(iemOp_nop_Ev)
{
    IEMOP_MNEMONIC(nop_Ev, "nop Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Currently a NOP. */
        NOREF(GCPtrEffSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

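/*
 * Note: 0f 1f /0 is the long NOP form recommended by both Intel and AMD;
 * e.g. the canonical 6-byte NOP is 66 0f 1f 44 00 00 (nop word [rax+rax]).
 * The r/m operand is decoded but never accessed, which is what the
 * IEM_MC_CALC_RM_EFF_ADDR + NOREF pairing above implements.
 */
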
/** Opcode 0x0f 0x20. */
FNIEMOP_DEF(iemOp_mov_Rd_Cd)
{
    /* mod is ignored, as are operand size overrides. */
    IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
    IEMOP_HLP_MIN_386();
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
    else
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
            return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_HLP_DONE_DECODING();

    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB, iCrReg);
}

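/*
 * A note on the LOCK/CR8 business checked above: AMD CPUs advertising the
 * AltMovCr8 feature (mirrored here by fMovCr8In32Bit) let 32-bit code reach
 * CR8, i.e. the TPR, by putting a LOCK prefix on a CR0 move.  Byte sequence
 * for illustration:
 *
 *      f0 0f 20 c0             lock mov eax, cr0 ; really reads CR8
 *
 * which is why iCrReg gets OR'ed with 8 when the feature is present and why
 * the encoding #UDs otherwise.
 */
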
/** Opcode 0x0f 0x21. */
FNIEMOP_DEF(iemOp_mov_Rd_Dd)
{
    IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
        return IEMOP_RAISE_INVALID_OPCODE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
                                   (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB,
                                   ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK));
}


/** Opcode 0x0f 0x22. */
FNIEMOP_DEF(iemOp_mov_Cd_Rd)
{
    /* mod is ignored, as are operand size overrides. */
    IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
    IEMOP_HLP_MIN_386();
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
    else
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
            return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_HLP_DONE_DECODING();

    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
}


/** Opcode 0x0f 0x23. */
FNIEMOP_DEF(iemOp_mov_Dd_Rd)
{
    IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
        return IEMOP_RAISE_INVALID_OPCODE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
                                   ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK),
                                   (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
}


/** Opcode 0x0f 0x24. */
FNIEMOP_DEF(iemOp_mov_Rd_Td)
{
    IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
    /** @todo works on 386 and 486. */
    /* The RM byte is not considered, see testcase. */
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x26. */
FNIEMOP_DEF(iemOp_mov_Td_Rd)
{
    IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
    /** @todo works on 386 and 486. */
    /* The RM byte is not considered, see testcase. */
    return IEMOP_RAISE_INVALID_OPCODE();
}


/**
 * @opcode 0x28
 * @oppfx none
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 1
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movaps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, MOVAPS, movaps, Vps_WO, Wps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                              (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/**
 * @opcode 0x28
 * @oppfx 66
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 1
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movapd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, MOVAPD, movapd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                              (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/* Opcode 0xf3 0x0f 0x28 - invalid */
/* Opcode 0xf2 0x0f 0x28 - invalid */

/**
 * @opcode 0x29
 * @oppfx none
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 1
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movaps_Wps_Vps)
{
    IEMOP_MNEMONIC2(MR, MOVAPS, movaps, Wps_WO, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/**
 * @opcode 0x29
 * @oppfx 66
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 1
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movapd_Wpd_Vpd)
{
    IEMOP_MNEMONIC2(MR, MOVAPD, movapd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/* Opcode 0xf3 0x0f 0x29 - invalid */
/* Opcode 0xf2 0x0f 0x29 - invalid */


/** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */
FNIEMOP_STUB(iemOp_cvtpi2ps_Vps_Qpi); //NEXT
/** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi */
FNIEMOP_STUB(iemOp_cvtpi2pd_Vpd_Qpi); //NEXT
/** Opcode 0xf3 0x0f 0x2a - cvtsi2ss Vss, Ey */
FNIEMOP_STUB(iemOp_cvtsi2ss_Vss_Ey); //NEXT
/** Opcode 0xf2 0x0f 0x2a - cvtsi2sd Vsd, Ey */
FNIEMOP_STUB(iemOp_cvtsi2sd_Vsd_Ey); //NEXT

/**
 * @opcode 0x2b
 * @opcodesub !11 mr/reg
 * @oppfx none
 * @opcpuid sse
 * @opgroup og_sse1_cachect
 * @opxcpttype 1
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movntps_Mps_Vps)
{
    IEMOP_MNEMONIC2(MR_MEM, MOVNTPS, movntps, Mps_WO, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    /* The register, register encoding is invalid. */
    else
        return IEMOP_RAISE_INVALID_OPCODE();
    return VINF_SUCCESS;
}

/**
 * @opcode 0x2b
 * @opcodesub !11 mr/reg
 * @oppfx 0x66
 * @opcpuid sse2
 * @opgroup og_sse2_cachect
 * @opxcpttype 1
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movntpd_Mpd_Vpd)
{
    IEMOP_MNEMONIC2(MR_MEM, MOVNTPD, movntpd, Mpd_WO, Vpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    /* The register, register encoding is invalid. */
    else
        return IEMOP_RAISE_INVALID_OPCODE();
    return VINF_SUCCESS;
}
/* Opcode 0xf3 0x0f 0x2b - invalid */
/* Opcode 0xf2 0x0f 0x2b - invalid */

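/*
 * Note: movntps/movntpd are non-temporal stores, i.e. a hint that the data
 * should bypass the caches; ordering against later stores is only guaranteed
 * after a fence, so guest code typically looks roughly like:
 *
 *      movntps [rdi], xmm0
 *      ...
 *      sfence
 *
 * IEM simply performs them as ordinary aligned 128-bit stores, which is
 * architecturally fine since the non-temporal aspect is only a hint.
 */
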
/** Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps */
FNIEMOP_STUB(iemOp_cvttps2pi_Ppi_Wps);
/** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd */
FNIEMOP_STUB(iemOp_cvttpd2pi_Ppi_Wpd);
/** Opcode 0xf3 0x0f 0x2c - cvttss2si Gy, Wss */
FNIEMOP_STUB(iemOp_cvttss2si_Gy_Wss);
/** Opcode 0xf2 0x0f 0x2c - cvttsd2si Gy, Wsd */
FNIEMOP_STUB(iemOp_cvttsd2si_Gy_Wsd);

/** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */
FNIEMOP_STUB(iemOp_cvtps2pi_Ppi_Wps);
/** Opcode 0x66 0x0f 0x2d - cvtpd2pi Qpi, Wpd */
FNIEMOP_STUB(iemOp_cvtpd2pi_Qpi_Wpd);
/** Opcode 0xf3 0x0f 0x2d - cvtss2si Gy, Wss */
FNIEMOP_STUB(iemOp_cvtss2si_Gy_Wss);
/** Opcode 0xf2 0x0f 0x2d - cvtsd2si Gy, Wsd */
FNIEMOP_STUB(iemOp_cvtsd2si_Gy_Wsd);

/** Opcode 0x0f 0x2e - ucomiss Vss, Wss */
FNIEMOP_STUB(iemOp_ucomiss_Vss_Wss); // NEXT
/** Opcode 0x66 0x0f 0x2e - ucomisd Vsd, Wsd */
FNIEMOP_STUB(iemOp_ucomisd_Vsd_Wsd); // NEXT
/* Opcode 0xf3 0x0f 0x2e - invalid */
/* Opcode 0xf2 0x0f 0x2e - invalid */

/** Opcode 0x0f 0x2f - comiss Vss, Wss */
FNIEMOP_STUB(iemOp_comiss_Vss_Wss);
/** Opcode 0x66 0x0f 0x2f - comisd Vsd, Wsd */
FNIEMOP_STUB(iemOp_comisd_Vsd_Wsd);
/* Opcode 0xf3 0x0f 0x2f - invalid */
/* Opcode 0xf2 0x0f 0x2f - invalid */

/** Opcode 0x0f 0x30. */
FNIEMOP_DEF(iemOp_wrmsr)
{
    IEMOP_MNEMONIC(wrmsr, "wrmsr");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
}


/** Opcode 0x0f 0x31. */
FNIEMOP_DEF(iemOp_rdtsc)
{
    IEMOP_MNEMONIC(rdtsc, "rdtsc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
}


/** Opcode 0x0f 0x32. */
FNIEMOP_DEF(iemOp_rdmsr)
{
    IEMOP_MNEMONIC(rdmsr, "rdmsr");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
}


/** Opcode 0x0f 0x33. */
FNIEMOP_DEF(iemOp_rdpmc)
{
    IEMOP_MNEMONIC(rdpmc, "rdpmc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdpmc);
}


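/*
 * Register conventions for the four instructions above, implemented by their
 * CImpl workers: rdmsr/wrmsr take the MSR index in ECX and move the value
 * via EDX:EAX, rdtsc returns the time stamp counter in EDX:EAX, and rdpmc
 * takes the counter index in ECX and also returns through EDX:EAX.  E.g.
 * assembling a 64-bit TSC read:
 *
 *      rdtsc                   ; EDX:EAX = TSC
 *      shl rdx, 32
 *      or  rax, rdx            ; RAX = full 64-bit TSC
 */
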
/** Opcode 0x0f 0x34. */
FNIEMOP_STUB(iemOp_sysenter);
/** Opcode 0x0f 0x35. */
FNIEMOP_STUB(iemOp_sysexit);
/** Opcode 0x0f 0x37. */
FNIEMOP_STUB(iemOp_getsec);


/** Opcode 0x0f 0x38. */
FNIEMOP_DEF(iemOp_3byte_Esc_0f_38)
{
#ifdef IEM_WITH_THREE_0F_38
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnThreeByte0f38[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
#else
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif
}


/** Opcode 0x0f 0x3a. */
FNIEMOP_DEF(iemOp_3byte_Esc_0f_3a)
{
#ifdef IEM_WITH_THREE_0F_3A
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnThreeByte0f3a[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
#else
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif
}

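/*
 * The two escape tables used above store four entries per opcode byte, one
 * per mandatory-prefix variant, hence the lookup:
 *
 *      pfn = g_apfnThreeByte0f3a[b * 4 + idxPrefix]
 *
 * where idxPrefix is 0 for no prefix and, in the current scheme, distinct
 * small values for the 0x66, 0xf3 and 0xf2 prefixes, as maintained by the
 * prefix decoding.
 */
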
/**
 * Implements a conditional move.
 *
 * Wish there was an obvious way to do this where we could share and reduce
 * code bloat.
 *
 * @param a_Cnd The conditional "microcode" operation.
 */
#define CMOV_X(a_Cnd) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) \
    { \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(0, 1); \
                IEM_MC_LOCAL(uint16_t, u16Tmp); \
                a_Cnd { \
                    IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
                    IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(0, 1); \
                IEM_MC_LOCAL(uint32_t, u32Tmp); \
                a_Cnd { \
                    IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
                    IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
                } IEM_MC_ELSE() { \
                    IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(0, 1); \
                IEM_MC_LOCAL(uint64_t, u64Tmp); \
                a_Cnd { \
                    IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
                    IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(0, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
                IEM_MC_LOCAL(uint16_t, u16Tmp); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
                IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
                a_Cnd { \
                    IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(0, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
                IEM_MC_LOCAL(uint32_t, u32Tmp); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
                IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
                a_Cnd { \
                    IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
                } IEM_MC_ELSE() { \
                    IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(0, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
                IEM_MC_LOCAL(uint64_t, u64Tmp); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
                a_Cnd { \
                    IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } do {} while (0)

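/*
 * A subtlety visible in the 32-bit cases of CMOV_X above: in 64-bit mode a
 * 32-bit cmov writes its destination even when the condition is false, the
 * upper 32 bits being zeroed either way, hence IEM_MC_CLEAR_HIGH_GREG_U64 in
 * the IEM_MC_ELSE branches.  As a sketch:
 *
 *      if (condition)
 *          dst = (uint64_t)(uint32_t)src;      value zero-extended
 *      else
 *          dst &= UINT32_MAX;                  high half still cleared
 */
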

/** Opcode 0x0f 0x40. */
FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
}


/** Opcode 0x0f 0x41. */
FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
}


/** Opcode 0x0f 0x42. */
FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
}


/** Opcode 0x0f 0x43. */
FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
}


/** Opcode 0x0f 0x44. */
FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
{
    IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
}


/** Opcode 0x0f 0x45. */
FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
}


/** Opcode 0x0f 0x46. */
FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
}


/** Opcode 0x0f 0x47. */
FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
}


/** Opcode 0x0f 0x48. */
FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
}


/** Opcode 0x0f 0x49. */
FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
}


/** Opcode 0x0f 0x4a. */
FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
}


/** Opcode 0x0f 0x4b. */
FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
}


/** Opcode 0x0f 0x4c. */
FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
}


/** Opcode 0x0f 0x4d. */
FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
}


/** Opcode 0x0f 0x4e. */
FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
}


/** Opcode 0x0f 0x4f. */
FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
}

#undef CMOV_X

/** Opcode 0x0f 0x50 - movmskps Gy, Ups */
FNIEMOP_STUB(iemOp_movmskps_Gy_Ups);
/** Opcode 0x66 0x0f 0x50 - movmskpd Gy, Upd */
FNIEMOP_STUB(iemOp_movmskpd_Gy_Upd);
/* Opcode 0xf3 0x0f 0x50 - invalid */
/* Opcode 0xf2 0x0f 0x50 - invalid */

/** Opcode 0x0f 0x51 - sqrtps Vps, Wps */
FNIEMOP_STUB(iemOp_sqrtps_Vps_Wps);
/** Opcode 0x66 0x0f 0x51 - sqrtpd Vpd, Wpd */
FNIEMOP_STUB(iemOp_sqrtpd_Vpd_Wpd);
/** Opcode 0xf3 0x0f 0x51 - sqrtss Vss, Wss */
FNIEMOP_STUB(iemOp_sqrtss_Vss_Wss);
/** Opcode 0xf2 0x0f 0x51 - sqrtsd Vsd, Wsd */
FNIEMOP_STUB(iemOp_sqrtsd_Vsd_Wsd);

/** Opcode 0x0f 0x52 - rsqrtps Vps, Wps */
FNIEMOP_STUB(iemOp_rsqrtps_Vps_Wps);
/* Opcode 0x66 0x0f 0x52 - invalid */
/** Opcode 0xf3 0x0f 0x52 - rsqrtss Vss, Wss */
FNIEMOP_STUB(iemOp_rsqrtss_Vss_Wss);
/* Opcode 0xf2 0x0f 0x52 - invalid */

/** Opcode 0x0f 0x53 - rcpps Vps, Wps */
FNIEMOP_STUB(iemOp_rcpps_Vps_Wps);
/* Opcode 0x66 0x0f 0x53 - invalid */
/** Opcode 0xf3 0x0f 0x53 - rcpss Vss, Wss */
FNIEMOP_STUB(iemOp_rcpss_Vss_Wss);
/* Opcode 0xf2 0x0f 0x53 - invalid */

/** Opcode 0x0f 0x54 - andps Vps, Wps */
FNIEMOP_STUB(iemOp_andps_Vps_Wps);
/** Opcode 0x66 0x0f 0x54 - andpd Vpd, Wpd */
FNIEMOP_STUB(iemOp_andpd_Vpd_Wpd);
/* Opcode 0xf3 0x0f 0x54 - invalid */
/* Opcode 0xf2 0x0f 0x54 - invalid */

/** Opcode 0x0f 0x55 - andnps Vps, Wps */
FNIEMOP_STUB(iemOp_andnps_Vps_Wps);
/** Opcode 0x66 0x0f 0x55 - andnpd Vpd, Wpd */
FNIEMOP_STUB(iemOp_andnpd_Vpd_Wpd);
/* Opcode 0xf3 0x0f 0x55 - invalid */
/* Opcode 0xf2 0x0f 0x55 - invalid */

/** Opcode 0x0f 0x56 - orps Vps, Wps */
FNIEMOP_STUB(iemOp_orps_Vps_Wps);
/** Opcode 0x66 0x0f 0x56 - orpd Vpd, Wpd */
FNIEMOP_STUB(iemOp_orpd_Vpd_Wpd);
/* Opcode 0xf3 0x0f 0x56 - invalid */
/* Opcode 0xf2 0x0f 0x56 - invalid */

/** Opcode 0x0f 0x57 - xorps Vps, Wps */
FNIEMOP_STUB(iemOp_xorps_Vps_Wps);
/** Opcode 0x66 0x0f 0x57 - xorpd Vpd, Wpd */
FNIEMOP_STUB(iemOp_xorpd_Vpd_Wpd);
/* Opcode 0xf3 0x0f 0x57 - invalid */
/* Opcode 0xf2 0x0f 0x57 - invalid */

/** Opcode 0x0f 0x58 - addps Vps, Wps */
FNIEMOP_STUB(iemOp_addps_Vps_Wps);
/** Opcode 0x66 0x0f 0x58 - addpd Vpd, Wpd */
FNIEMOP_STUB(iemOp_addpd_Vpd_Wpd);
/** Opcode 0xf3 0x0f 0x58 - addss Vss, Wss */
FNIEMOP_STUB(iemOp_addss_Vss_Wss);
/** Opcode 0xf2 0x0f 0x58 - addsd Vsd, Wsd */
FNIEMOP_STUB(iemOp_addsd_Vsd_Wsd);

/** Opcode 0x0f 0x59 - mulps Vps, Wps */
FNIEMOP_STUB(iemOp_mulps_Vps_Wps);
/** Opcode 0x66 0x0f 0x59 - mulpd Vpd, Wpd */
FNIEMOP_STUB(iemOp_mulpd_Vpd_Wpd);
/** Opcode 0xf3 0x0f 0x59 - mulss Vss, Wss */
FNIEMOP_STUB(iemOp_mulss_Vss_Wss);
/** Opcode 0xf2 0x0f 0x59 - mulsd Vsd, Wsd */
FNIEMOP_STUB(iemOp_mulsd_Vsd_Wsd);

/** Opcode 0x0f 0x5a - cvtps2pd Vpd, Wps */
FNIEMOP_STUB(iemOp_cvtps2pd_Vpd_Wps);
/** Opcode 0x66 0x0f 0x5a - cvtpd2ps Vps, Wpd */
FNIEMOP_STUB(iemOp_cvtpd2ps_Vps_Wpd);
/** Opcode 0xf3 0x0f 0x5a - cvtss2sd Vsd, Wss */
FNIEMOP_STUB(iemOp_cvtss2sd_Vsd_Wss);
/** Opcode 0xf2 0x0f 0x5a - cvtsd2ss Vss, Wsd */
FNIEMOP_STUB(iemOp_cvtsd2ss_Vss_Wsd);

/** Opcode 0x0f 0x5b - cvtdq2ps Vps, Wdq */
FNIEMOP_STUB(iemOp_cvtdq2ps_Vps_Wdq);
/** Opcode 0x66 0x0f 0x5b - cvtps2dq Vdq, Wps */
FNIEMOP_STUB(iemOp_cvtps2dq_Vdq_Wps);
/** Opcode 0xf3 0x0f 0x5b - cvttps2dq Vdq, Wps */
FNIEMOP_STUB(iemOp_cvttps2dq_Vdq_Wps);
/* Opcode 0xf2 0x0f 0x5b - invalid */

/** Opcode 0x0f 0x5c - subps Vps, Wps */
FNIEMOP_STUB(iemOp_subps_Vps_Wps);
/** Opcode 0x66 0x0f 0x5c - subpd Vpd, Wpd */
FNIEMOP_STUB(iemOp_subpd_Vpd_Wpd);
/** Opcode 0xf3 0x0f 0x5c - subss Vss, Wss */
FNIEMOP_STUB(iemOp_subss_Vss_Wss);
/** Opcode 0xf2 0x0f 0x5c - subsd Vsd, Wsd */
FNIEMOP_STUB(iemOp_subsd_Vsd_Wsd);

/** Opcode 0x0f 0x5d - minps Vps, Wps */
FNIEMOP_STUB(iemOp_minps_Vps_Wps);
/** Opcode 0x66 0x0f 0x5d - minpd Vpd, Wpd */
FNIEMOP_STUB(iemOp_minpd_Vpd_Wpd);
/** Opcode 0xf3 0x0f 0x5d - minss Vss, Wss */
FNIEMOP_STUB(iemOp_minss_Vss_Wss);
/** Opcode 0xf2 0x0f 0x5d - minsd Vsd, Wsd */
FNIEMOP_STUB(iemOp_minsd_Vsd_Wsd);

/** Opcode 0x0f 0x5e - divps Vps, Wps */
FNIEMOP_STUB(iemOp_divps_Vps_Wps);
/** Opcode 0x66 0x0f 0x5e - divpd Vpd, Wpd */
FNIEMOP_STUB(iemOp_divpd_Vpd_Wpd);
/** Opcode 0xf3 0x0f 0x5e - divss Vss, Wss */
FNIEMOP_STUB(iemOp_divss_Vss_Wss);
/** Opcode 0xf2 0x0f 0x5e - divsd Vsd, Wsd */
FNIEMOP_STUB(iemOp_divsd_Vsd_Wsd);

/** Opcode 0x0f 0x5f - maxps Vps, Wps */
FNIEMOP_STUB(iemOp_maxps_Vps_Wps);
/** Opcode 0x66 0x0f 0x5f - maxpd Vpd, Wpd */
FNIEMOP_STUB(iemOp_maxpd_Vpd_Wpd);
/** Opcode 0xf3 0x0f 0x5f - maxss Vss, Wss */
FNIEMOP_STUB(iemOp_maxss_Vss_Wss);
/** Opcode 0xf2 0x0f 0x5f - maxsd Vsd, Wsd */
FNIEMOP_STUB(iemOp_maxsd_Vsd_Wsd);

/**
 * Common worker for SSE2 instructions on the forms:
 *      pxxxx xmm1, xmm2/mem128
 *
 * The 2nd operand is the low half of a register, which in the memory case
 * means a 64-bit access, 128-bit aligned (the CPU possibly reading the whole
 * 128 bits).
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_XREG_U64_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64_ALIGN_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for MMX instructions on the forms:
 *      pxxxx mm1, mm2/mem32
 *
 * The 2nd operand is the low half of a register, which in the memory case
 * means a 32-bit access.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonMmx_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (!pImpl->pfnU64)
        return IEMOP_RAISE_INVALID_OPCODE();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint32_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_REF_MREG_U32_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
        IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint32_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

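/*
 * What the unpack callees behind pfnU64/pfnU128 compute, sketched with the
 * byte variants on 64-bit values (all values illustrative):
 *
 *      dst = 0x1716151413121110, src = 0x2726252423222120
 *      punpcklbw dst, src  ->  dst = 0x2313221221112010
 *      punpckhbw dst, src  ->  dst = 0x2717261625152414
 *
 * i.e. the low (or high) halves of the two operands are interleaved bytewise,
 * the destination bytes landing in the even positions.
 */
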
/** Opcode 0x0f 0x60 - punpcklbw Pq, Qd */
FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
{
    IEMOP_MNEMONIC(punpcklbw, "punpcklbw Pq, Qd");
    return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklbw);
}

/** Opcode 0x66 0x0f 0x60 - punpcklbw Vx, Wx */
FNIEMOP_DEF(iemOp_punpcklbw_Vx_Wx)
{
    IEMOP_MNEMONIC(punpcklbw_Vx_Wx, "punpcklbw Vx, Wx");
    return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklbw);
}

/* Opcode 0xf3 0x0f 0x60 - invalid */


/** Opcode 0x0f 0x61 - punpcklwd Pq, Qd */
FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd)
{
    IEMOP_MNEMONIC(punpcklwd, "punpcklwd Pq, Qd"); /** @todo AMD marks the MMX version as 3DNow!. Intel says MMX CPUID req. */
    return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklwd);
}

/** Opcode 0x66 0x0f 0x61 - punpcklwd Vx, Wx */
FNIEMOP_DEF(iemOp_punpcklwd_Vx_Wx)
{
    IEMOP_MNEMONIC(punpcklwd_Vx_Wx, "punpcklwd Vx, Wx");
    return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklwd);
}

/* Opcode 0xf3 0x0f 0x61 - invalid */


/** Opcode 0x0f 0x62 - punpckldq Pq, Qd */
FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd)
{
    IEMOP_MNEMONIC(punpckldq, "punpckldq Pq, Qd");
    return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpckldq);
}

/** Opcode 0x66 0x0f 0x62 - punpckldq Vx, Wx */
FNIEMOP_DEF(iemOp_punpckldq_Vx_Wx)
{
    IEMOP_MNEMONIC(punpckldq_Vx_Wx, "punpckldq Vx, Wx");
    return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpckldq);
}

/* Opcode 0xf3 0x0f 0x62 - invalid */



/** Opcode 0x0f 0x63 - packsswb Pq, Qq */
FNIEMOP_STUB(iemOp_packsswb_Pq_Qq);
/** Opcode 0x66 0x0f 0x63 - packsswb Vx, Wx */
FNIEMOP_STUB(iemOp_packsswb_Vx_Wx);
/* Opcode 0xf3 0x0f 0x63 - invalid */

/** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq */
FNIEMOP_STUB(iemOp_pcmpgtb_Pq_Qq);
/** Opcode 0x66 0x0f 0x64 - pcmpgtb Vx, Wx */
FNIEMOP_STUB(iemOp_pcmpgtb_Vx_Wx);
/* Opcode 0xf3 0x0f 0x64 - invalid */

/** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq */
FNIEMOP_STUB(iemOp_pcmpgtw_Pq_Qq);
/** Opcode 0x66 0x0f 0x65 - pcmpgtw Vx, Wx */
FNIEMOP_STUB(iemOp_pcmpgtw_Vx_Wx);
/* Opcode 0xf3 0x0f 0x65 - invalid */

/** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq */
FNIEMOP_STUB(iemOp_pcmpgtd_Pq_Qq);
/** Opcode 0x66 0x0f 0x66 - pcmpgtd Vx, Wx */
FNIEMOP_STUB(iemOp_pcmpgtd_Vx_Wx);
/* Opcode 0xf3 0x0f 0x66 - invalid */

/** Opcode 0x0f 0x67 - packuswb Pq, Qq */
FNIEMOP_STUB(iemOp_packuswb_Pq_Qq);
/** Opcode 0x66 0x0f 0x67 - packuswb Vx, W */
FNIEMOP_STUB(iemOp_packuswb_Vx_W);
/* Opcode 0xf3 0x0f 0x67 - invalid */

/**
 * Common worker for MMX instructions on the form:
 *      pxxxx mm1, mm2/mem64
 *
 * The 2nd operand is the high half of a register, which in the memory case
 * means a full 64-bit memory access.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonMmx_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    AssertReturn(pImpl->pfnU64, IEMOP_RAISE_INVALID_OPCODE());
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
        IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for SSE2 instructions on the form:
 *      pxxxx xmm1, xmm2/mem128
 *
 * The 2nd operand is the high half of a register, which in the memory case
 * means a 128-bit aligned access where the CPU may read the full 128 bits or
 * only the upper 64 bits.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Most CPUs probably only read the high qword. */

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/** Opcode 0x0f 0x68 - punpckhbw Pq, Qd */
FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qd)
{
    IEMOP_MNEMONIC(punpckhbw, "punpckhbw Pq, Qd");
    return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
}

/** Opcode 0x66 0x0f 0x68 - punpckhbw Vx, Wx */
FNIEMOP_DEF(iemOp_punpckhbw_Vx_Wx)
{
    IEMOP_MNEMONIC(punpckhbw_Vx_Wx, "punpckhbw Vx, Wx");
    return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
}
/* Opcode 0xf3 0x0f 0x68 - invalid */


/** Opcode 0x0f 0x69 - punpckhwd Pq, Qd */
FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qd)
{
    IEMOP_MNEMONIC(punpckhwd, "punpckhwd Pq, Qd");
    return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
}

/** Opcode 0x66 0x0f 0x69 - punpckhwd Vx, Wx */
FNIEMOP_DEF(iemOp_punpckhwd_Vx_Wx)
{
    IEMOP_MNEMONIC(punpckhwd_Vx_Wx, "punpckhwd Vx, Wx");
    return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
}
/* Opcode 0xf3 0x0f 0x69 - invalid */


/** Opcode 0x0f 0x6a - punpckhdq Pq, Qd */
FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qd)
{
    IEMOP_MNEMONIC(punpckhdq, "punpckhdq Pq, Qd");
    return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
}

/** Opcode 0x66 0x0f 0x6a - punpckhdq Vx, W */
FNIEMOP_DEF(iemOp_punpckhdq_Vx_W)
{
    IEMOP_MNEMONIC(punpckhdq_Vx_W, "punpckhdq Vx, W");
    return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
}
/* Opcode 0xf3 0x0f 0x6a - invalid */


/** Opcode 0x0f 0x6b - packssdw Pq, Qd */
FNIEMOP_STUB(iemOp_packssdw_Pq_Qd);
/** Opcode 0x66 0x0f 0x6b - packssdw Vx, Wx */
FNIEMOP_STUB(iemOp_packssdw_Vx_Wx);
/* Opcode 0xf3 0x0f 0x6b - invalid */


/* Opcode 0x0f 0x6c - invalid */

/** Opcode 0x66 0x0f 0x6c - punpcklqdq Vx, Wx */
FNIEMOP_DEF(iemOp_punpcklqdq_Vx_Wx)
{
    IEMOP_MNEMONIC(punpcklqdq, "punpcklqdq Vx, Wx");
    return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklqdq);
}

/* Opcode 0xf3 0x0f 0x6c - invalid */
/* Opcode 0xf2 0x0f 0x6c - invalid */


/* Opcode 0x0f 0x6d - invalid */

/** Opcode 0x66 0x0f 0x6d - punpckhqdq Vx, W */
FNIEMOP_DEF(iemOp_punpckhqdq_Vx_W)
{
    IEMOP_MNEMONIC(punpckhqdq_Vx_W, "punpckhqdq Vx, W");
    return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhqdq);
}

/* Opcode 0xf3 0x0f 0x6d - invalid */


FNIEMOP_DEF(iemOp_movd_q_Pd_Ey)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        /**
         * @opcode 0x6e
         * @opcodesub rex.w=1
         * @oppfx none
         * @opcpuid mmx
         * @opgroup og_mmx_datamove
         * @opxcpttype 5
         * @optest 64-bit / op1=1 op2=2 -> op1=2 ftw=0xff
         * @optest 64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
         */
        IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Eq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
        if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        {
            /* MMX, greg64 */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

            IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
            IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            /* MMX, [mem64] */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

            IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /**
         * @opdone
         * @opcode 0x6e
         * @opcodesub rex.w=0
         * @oppfx none
         * @opcpuid mmx
         * @opgroup og_mmx_datamove
         * @opxcpttype 5
         * @opfunction iemOp_movd_q_Pd_Ey
         * @optest op1=1 op2=2 -> op1=2 ftw=0xff
         * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
         */
        IEMOP_MNEMONIC2(RM, MOVD, movd, PdZx_WO, Ed, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
        if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        {
            /* MMX, greg */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

            IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
            IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            /* MMX, [mem] */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

            IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_MREG_U32_ZX_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u32Tmp);
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}

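/*
 * The ftw=0xff in the @optest lines above is the IEM_MC_FPU_TO_MMX_MODE side
 * effect: an MMX write tags the whole x87 register stack as valid (abridged
 * FTW becomes 0xff) and zeroes TOP, which is the architectural consequence
 * of entering MMX mode.
 */
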
/** Opcode 0x66 0x0f 0x6e - movd/q Vy, Ey */
FNIEMOP_DEF(iemOp_movd_q_Vy_Ey)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        /**
         * @opcode 0x6e
         * @opcodesub rex.w=1
         * @oppfx 0x66
         * @opcpuid sse2
         * @opgroup og_sse2_simdint_datamove
         * @opxcpttype 5
         * @optest 64-bit / op1=1 op2=2 -> op1=2
         * @optest 64-bit / op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Eq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
        if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        {
            /* XMM, greg64 */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

            IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
            IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            /* XMM, [mem64] */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

            IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /**
         * @opdone
         * @opcode 0x6e
         * @opcodesub rex.w=0
         * @oppfx 0x66
         * @opcpuid sse2
         * @opgroup og_sse2_simdint_datamove
         * @opxcpttype 5
         * @opfunction iemOp_movd_q_Vy_Ey
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(RM, MOVD, movd, VdZx_WO, Ed, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX);
        if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        {
            /* XMM, greg32 */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

            IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
            IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            /* XMM, [mem32] */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

            IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}

/* Opcode 0xf3 0x0f 0x6e - invalid */


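/*
 * For both 0x66 forms above the destination is the full 128-bit register:
 * movd/movq into an XMM register zero-extends, so e.g. (illustrative)
 *
 *      movd xmm0, eax          ; bits 127:32 of xmm0 become zero
 *
 * which is what the IEM_MC_STORE_XREG_U32_ZX_U128 and
 * IEM_MC_STORE_XREG_U64_ZX_U128 microcode ops encode.
 */
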
/** Opcode 0x0f 0x6f - movq Pq, Qq */
FNIEMOP_DEF(iemOp_movq_Pq_Qq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC(movq_Pq_Qq, "movq Pq,Qq");
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, u64Tmp);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
        IEM_MC_FETCH_MREG_U64(u64Tmp, bRm & X86_MODRM_RM_MASK);
        IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, u64Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
        IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

3712/** Opcode 0x66 0x0f 0x6f - movdqa Vx, Wx */
3713FNIEMOP_DEF(iemOp_movdqa_Vx_Wx)
3714{
3715 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3716 IEMOP_MNEMONIC(movdqa_Vdq_Wdq, "movdqa Vdq,Wdq");
3717 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3718 {
3719 /*
3720 * Register, register.
3721 */
3722 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3723 IEM_MC_BEGIN(0, 0);
3724 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3725 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3726 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
3727 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3728 IEM_MC_ADVANCE_RIP();
3729 IEM_MC_END();
3730 }
3731 else
3732 {
3733 /*
3734 * Register, memory.
3735 */
3736 IEM_MC_BEGIN(0, 2);
3737 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3738 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3739
3740 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3741 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3742 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3743 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3744 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3745 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
3746
3747 IEM_MC_ADVANCE_RIP();
3748 IEM_MC_END();
3749 }
3750 return VINF_SUCCESS;
3751}
3752
3753/** Opcode 0xf3 0x0f 0x6f - movdqu Vx, Wx */
3754FNIEMOP_DEF(iemOp_movdqu_Vx_Wx)
3755{
3756 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3757 IEMOP_MNEMONIC(movdqu_Vdq_Wdq, "movdqu Vdq,Wdq");
3758 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3759 {
3760 /*
3761 * Register, register.
3762 */
3763 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3764 IEM_MC_BEGIN(0, 0);
3765 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3766 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3767 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
3768 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3769 IEM_MC_ADVANCE_RIP();
3770 IEM_MC_END();
3771 }
3772 else
3773 {
3774 /*
3775 * Register, memory.
3776 */
3777 IEM_MC_BEGIN(0, 2);
3778 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3779 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3780
3781 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3782 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3783 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3784 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
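        /* movdqu permits unaligned operands, hence the plain U128 fetch here
           rather than the _ALIGN_SSE variant used by movdqa above. */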
3785 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3786 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
3787
3788 IEM_MC_ADVANCE_RIP();
3789 IEM_MC_END();
3790 }
3791 return VINF_SUCCESS;
3792}
3793
3794
3795/** Opcode 0x0f 0x70 - pshufw Pq, Qq, Ib */
3796FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib)
3797{
3798    IEMOP_MNEMONIC(pshufw_Pq_Qq_Ib, "pshufw Pq,Qq,Ib");
3799 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3800 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3801 {
3802 /*
3803 * Register, register.
3804 */
3805 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3806 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3807
3808 IEM_MC_BEGIN(3, 0);
3809 IEM_MC_ARG(uint64_t *, pDst, 0);
3810 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3811 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3812 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
3813 IEM_MC_PREPARE_FPU_USAGE();
3814 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3815 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3816 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
3817 IEM_MC_ADVANCE_RIP();
3818 IEM_MC_END();
3819 }
3820 else
3821 {
3822 /*
3823 * Register, memory.
3824 */
3825 IEM_MC_BEGIN(3, 2);
3826 IEM_MC_ARG(uint64_t *, pDst, 0);
3827 IEM_MC_LOCAL(uint64_t, uSrc);
3828 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3829 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3830
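        /* The imm8 operand follows any ModR/M displacement bytes, so it is
           fetched only after the effective address calculation; the cbImm
           argument (1) lets RIP-relative addressing account for it. */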
3831        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3832 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3833 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3834 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3835 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
3836
3837 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3838 IEM_MC_PREPARE_FPU_USAGE();
3839 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3840 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
3841
3842 IEM_MC_ADVANCE_RIP();
3843 IEM_MC_END();
3844 }
3845 return VINF_SUCCESS;
3846}
3847
3848/** Opcode 0x66 0x0f 0x70 - pshufd Vx, Wx, Ib */
3849FNIEMOP_DEF(iemOp_pshufd_Vx_Wx_Ib)
3850{
3851 IEMOP_MNEMONIC(pshufd_Vx_Wx_Ib, "pshufd Vx,Wx,Ib");
3852 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3853 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3854 {
3855 /*
3856 * Register, register.
3857 */
3858 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3859 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3860
3861 IEM_MC_BEGIN(3, 0);
3862 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3863 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3864 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3865 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3866 IEM_MC_PREPARE_SSE_USAGE();
3867 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3868 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3869 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
3870 IEM_MC_ADVANCE_RIP();
3871 IEM_MC_END();
3872 }
3873 else
3874 {
3875 /*
3876 * Register, memory.
3877 */
3878 IEM_MC_BEGIN(3, 2);
3879 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3880 IEM_MC_LOCAL(RTUINT128U, uSrc);
3881 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3882 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3883
3884        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3885 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3886 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3887 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3888 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3889
3890 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3891 IEM_MC_PREPARE_SSE_USAGE();
3892 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3893 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
3894
3895 IEM_MC_ADVANCE_RIP();
3896 IEM_MC_END();
3897 }
3898 return VINF_SUCCESS;
3899}
3900
3901/** Opcode 0xf3 0x0f 0x70 - pshufhw Vx, Wx, Ib */
3902FNIEMOP_DEF(iemOp_pshufhw_Vx_Wx_Ib)
3903{
3904 IEMOP_MNEMONIC(pshufhw_Vx_Wx_Ib, "pshufhw Vx,Wx,Ib");
3905 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3906 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3907 {
3908 /*
3909 * Register, register.
3910 */
3911 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3912 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3913
3914 IEM_MC_BEGIN(3, 0);
3915 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3916 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3917 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3918 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3919 IEM_MC_PREPARE_SSE_USAGE();
3920 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3921 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3922 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
3923 IEM_MC_ADVANCE_RIP();
3924 IEM_MC_END();
3925 }
3926 else
3927 {
3928 /*
3929 * Register, memory.
3930 */
3931 IEM_MC_BEGIN(3, 2);
3932 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3933 IEM_MC_LOCAL(RTUINT128U, uSrc);
3934 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3935 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3936
3937        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3938 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3939 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3940 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3941 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3942
3943 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3944 IEM_MC_PREPARE_SSE_USAGE();
3945 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3946 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
3947
3948 IEM_MC_ADVANCE_RIP();
3949 IEM_MC_END();
3950 }
3951 return VINF_SUCCESS;
3952}
3953
3954/** Opcode 0xf2 0x0f 0x70 - pshuflw Vx, Wx, Ib */
3955FNIEMOP_DEF(iemOp_pshuflw_Vx_Wx_Ib)
3956{
3957 IEMOP_MNEMONIC(pshuflw_Vx_Wx_Ib, "pshuflw Vx,Wx,Ib");
3958 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3959 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3960 {
3961 /*
3962 * Register, register.
3963 */
3964 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3965 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3966
3967 IEM_MC_BEGIN(3, 0);
3968 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3969 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3970 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3971 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3972 IEM_MC_PREPARE_SSE_USAGE();
3973 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3974 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3975 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
3976 IEM_MC_ADVANCE_RIP();
3977 IEM_MC_END();
3978 }
3979 else
3980 {
3981 /*
3982 * Register, memory.
3983 */
3984 IEM_MC_BEGIN(3, 2);
3985 IEM_MC_ARG(PRTUINT128U, pDst, 0);
3986 IEM_MC_LOCAL(RTUINT128U, uSrc);
3987 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3988 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3989
3990        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3991 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3992 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3993 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3994 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3995
3996 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3997 IEM_MC_PREPARE_SSE_USAGE();
3998 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3999 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
4000
4001 IEM_MC_ADVANCE_RIP();
4002 IEM_MC_END();
4003 }
4004 return VINF_SUCCESS;
4005}
4006
4007
4008/** Opcode 0x0f 0x71 11/2. */
4009FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Nq_Ib, uint8_t, bRm);
4010
4011/** Opcode 0x66 0x0f 0x71 11/2. */
4012FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Ux_Ib, uint8_t, bRm);
4013
4014/** Opcode 0x0f 0x71 11/4. */
4015FNIEMOP_STUB_1(iemOp_Grp12_psraw_Nq_Ib, uint8_t, bRm);
4016
4017/** Opcode 0x66 0x0f 0x71 11/4. */
4018FNIEMOP_STUB_1(iemOp_Grp12_psraw_Ux_Ib, uint8_t, bRm);
4019
4020/** Opcode 0x0f 0x71 11/6. */
4021FNIEMOP_STUB_1(iemOp_Grp12_psllw_Nq_Ib, uint8_t, bRm);
4022
4023/** Opcode 0x66 0x0f 0x71 11/6. */
4024FNIEMOP_STUB_1(iemOp_Grp12_psllw_Ux_Ib, uint8_t, bRm);
4025
4026
4027/**
4028 * Group 12 jump table for register variant.
4029 */
4030IEM_STATIC const PFNIEMOPRM g_apfnGroup12RegReg[] =
4031{
4032 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4033 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4034 /* /2 */ iemOp_Grp12_psrlw_Nq_Ib, iemOp_Grp12_psrlw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4035 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4036 /* /4 */ iemOp_Grp12_psraw_Nq_Ib, iemOp_Grp12_psraw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4037 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4038 /* /6 */ iemOp_Grp12_psllw_Nq_Ib, iemOp_Grp12_psllw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4039 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
4040};
4041AssertCompile(RT_ELEMENTS(g_apfnGroup12RegReg) == 8*4);
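
/* Each /r row above has four columns, indexed by the active SIMD prefix in
   the order none, 0x66, 0xf3, 0xf2 (pVCpu->iem.s.idxPrefix).  For example,
   0x66 0x0f 0x71 /2 ib (psrlw xmm, imm8) dispatches to entry 2*4 + 1. */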
4042
4043
4044/** Opcode 0x0f 0x71. */
4045FNIEMOP_DEF(iemOp_Grp12)
4046{
4047 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4048 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4049 /* register, register */
4050 return FNIEMOP_CALL_1(g_apfnGroup12RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
4051 + pVCpu->iem.s.idxPrefix], bRm);
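    /* The memory (mod != 3) forms of groups 12, 13 and 14 are all invalid. */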
4052 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
4053}
4054
4055
4056/** Opcode 0x0f 0x72 11/2. */
4057FNIEMOP_STUB_1(iemOp_Grp13_psrld_Nq_Ib, uint8_t, bRm);
4058
4059/** Opcode 0x66 0x0f 0x72 11/2. */
4060FNIEMOP_STUB_1(iemOp_Grp13_psrld_Ux_Ib, uint8_t, bRm);
4061
4062/** Opcode 0x0f 0x72 11/4. */
4063FNIEMOP_STUB_1(iemOp_Grp13_psrad_Nq_Ib, uint8_t, bRm);
4064
4065/** Opcode 0x66 0x0f 0x72 11/4. */
4066FNIEMOP_STUB_1(iemOp_Grp13_psrad_Ux_Ib, uint8_t, bRm);
4067
4068/** Opcode 0x0f 0x72 11/6. */
4069FNIEMOP_STUB_1(iemOp_Grp13_pslld_Nq_Ib, uint8_t, bRm);
4070
4071/** Opcode 0x66 0x0f 0x72 11/6. */
4072FNIEMOP_STUB_1(iemOp_Grp13_pslld_Ux_Ib, uint8_t, bRm);
4073
4074
4075/**
4076 * Group 13 jump table for register variant.
4077 */
4078IEM_STATIC const PFNIEMOPRM g_apfnGroup13RegReg[] =
4079{
4080 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4081 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4082 /* /2 */ iemOp_Grp13_psrld_Nq_Ib, iemOp_Grp13_psrld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4083 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4084 /* /4 */ iemOp_Grp13_psrad_Nq_Ib, iemOp_Grp13_psrad_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4085 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4086 /* /6 */ iemOp_Grp13_pslld_Nq_Ib, iemOp_Grp13_pslld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4087 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
4088};
4089AssertCompile(RT_ELEMENTS(g_apfnGroup13RegReg) == 8*4);
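
/* Same four-columns-per-/r layout as the Group 12 table above. */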
4090
4091/** Opcode 0x0f 0x72. */
4092FNIEMOP_DEF(iemOp_Grp13)
4093{
4094 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4095 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4096 /* register, register */
4097 return FNIEMOP_CALL_1(g_apfnGroup13RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
4098 + pVCpu->iem.s.idxPrefix], bRm);
4099 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
4100}
4101
4102
4103/** Opcode 0x0f 0x73 11/2. */
4104FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Nq_Ib, uint8_t, bRm);
4105
4106/** Opcode 0x66 0x0f 0x73 11/2. */
4107FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Ux_Ib, uint8_t, bRm);
4108
4109/** Opcode 0x66 0x0f 0x73 11/3. */
4110FNIEMOP_STUB_1(iemOp_Grp14_psrldq_Ux_Ib, uint8_t, bRm); //NEXT
4111
4112/** Opcode 0x0f 0x73 11/6. */
4113FNIEMOP_STUB_1(iemOp_Grp14_psllq_Nq_Ib, uint8_t, bRm);
4114
4115/** Opcode 0x66 0x0f 0x73 11/6. */
4116FNIEMOP_STUB_1(iemOp_Grp14_psllq_Ux_Ib, uint8_t, bRm);
4117
4118/** Opcode 0x66 0x0f 0x73 11/7. */
4119FNIEMOP_STUB_1(iemOp_Grp14_pslldq_Ux_Ib, uint8_t, bRm); //NEXT
4120
4121/**
4122 * Group 14 jump table for register variant.
4123 */
4124IEM_STATIC const PFNIEMOPRM g_apfnGroup14RegReg[] =
4125{
4126 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4127 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4128 /* /2 */ iemOp_Grp14_psrlq_Nq_Ib, iemOp_Grp14_psrlq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4129 /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_psrldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4130 /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4131 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4132 /* /6 */ iemOp_Grp14_psllq_Nq_Ib, iemOp_Grp14_psllq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4133 /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_pslldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4134};
4135AssertCompile(RT_ELEMENTS(g_apfnGroup14RegReg) == 8*4);
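
/* Note that /3 (psrldq) and /7 (pslldq) exist only with the 0x66 prefix,
   hence the lone non-invalid entries in the second column of those rows. */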
4136
4137
4138/** Opcode 0x0f 0x73. */
4139FNIEMOP_DEF(iemOp_Grp14)
4140{
4141 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4142 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4143 /* register, register */
4144 return FNIEMOP_CALL_1(g_apfnGroup14RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
4145 + pVCpu->iem.s.idxPrefix], bRm);
4146 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
4147}
4148
4149
4150/**
4151 * Common worker for MMX instructions on the form:
4152 * pxxx mm1, mm2/mem64
4153 */
4154FNIEMOP_DEF_1(iemOpCommonMmx_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
4155{
4156 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4157 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4158 {
4159 /*
4160 * Register, register.
4161 */
4162 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
4163 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
4164 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4165 IEM_MC_BEGIN(2, 0);
4166 IEM_MC_ARG(uint64_t *, pDst, 0);
4167 IEM_MC_ARG(uint64_t const *, pSrc, 1);
4168 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4169 IEM_MC_PREPARE_FPU_USAGE();
4170 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4171 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
4172 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
4173 IEM_MC_ADVANCE_RIP();
4174 IEM_MC_END();
4175 }
4176 else
4177 {
4178 /*
4179 * Register, memory.
4180 */
4181 IEM_MC_BEGIN(2, 2);
4182 IEM_MC_ARG(uint64_t *, pDst, 0);
4183 IEM_MC_LOCAL(uint64_t, uSrc);
4184 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
4185 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4186
4187 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4188 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4189 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4190 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4191
4192 IEM_MC_PREPARE_FPU_USAGE();
4193 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4194 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
4195
4196 IEM_MC_ADVANCE_RIP();
4197 IEM_MC_END();
4198 }
4199 return VINF_SUCCESS;
4200}
4201
4202
4203/**
4204 * Common worker for SSE2 instructions on the form:
4205 * pxxx xmm1, xmm2/mem128
4206 *
4207 * Proper alignment of the 128-bit operand is enforced.
4208 * Exceptions type 4. SSE2 cpuid checks.
4209 */
4210FNIEMOP_DEF_1(iemOpCommonSse2_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
4211{
4212 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4213 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4214 {
4215 /*
4216 * Register, register.
4217 */
4218 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4219 IEM_MC_BEGIN(2, 0);
4220 IEM_MC_ARG(PRTUINT128U, pDst, 0);
4221 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
4222 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4223 IEM_MC_PREPARE_SSE_USAGE();
4224 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4225 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4226 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
4227 IEM_MC_ADVANCE_RIP();
4228 IEM_MC_END();
4229 }
4230 else
4231 {
4232 /*
4233 * Register, memory.
4234 */
4235 IEM_MC_BEGIN(2, 2);
4236 IEM_MC_ARG(PRTUINT128U, pDst, 0);
4237 IEM_MC_LOCAL(RTUINT128U, uSrc);
4238 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
4239 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4240
4241 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4242 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4243 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4244 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4245
4246 IEM_MC_PREPARE_SSE_USAGE();
4247 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4248 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
4249
4250 IEM_MC_ADVANCE_RIP();
4251 IEM_MC_END();
4252 }
4253 return VINF_SUCCESS;
4254}
4255
4256
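/* The pcmpeq* decoders below all route through the two common workers above,
   passing the matching g_iemAImpl_* implementation table. */
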
4257/** Opcode 0x0f 0x74 - pcmpeqb Pq, Qq */
4258FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq)
4259{
4260 IEMOP_MNEMONIC(pcmpeqb, "pcmpeqb");
4261 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
4262}
4263
4264/** Opcode 0x66 0x0f 0x74 - pcmpeqb Vx, Wx */
4265FNIEMOP_DEF(iemOp_pcmpeqb_Vx_Wx)
4266{
4267    IEMOP_MNEMONIC(pcmpeqb_Vx_Wx, "pcmpeqb");
4268 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
4269}
4270
4271/* Opcode 0xf3 0x0f 0x74 - invalid */
4272/* Opcode 0xf2 0x0f 0x74 - invalid */
4273
4274
4275/** Opcode 0x0f 0x75 - pcmpeqw Pq, Qq */
4276FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq)
4277{
4278 IEMOP_MNEMONIC(pcmpeqw, "pcmpeqw");
4279 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
4280}
4281
4282/** Opcode 0x66 0x0f 0x75 - pcmpeqw Vx, Wx */
4283FNIEMOP_DEF(iemOp_pcmpeqw_Vx_Wx)
4284{
4285 IEMOP_MNEMONIC(pcmpeqw_Vx_Wx, "pcmpeqw");
4286 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
4287}
4288
4289/* Opcode 0xf3 0x0f 0x75 - invalid */
4290/* Opcode 0xf2 0x0f 0x75 - invalid */
4291
4292
4293/** Opcode 0x0f 0x76 - pcmpeqd Pq, Qq */
4294FNIEMOP_DEF(iemOp_pcmpeqd_Pq_Qq)
4295{
4296 IEMOP_MNEMONIC(pcmpeqd, "pcmpeqd");
4297 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
4298}
4299
4300/** Opcode 0x66 0x0f 0x76 - pcmpeqd Vx, Wx */
4301FNIEMOP_DEF(iemOp_pcmpeqd_Vx_Wx)
4302{
4303    IEMOP_MNEMONIC(pcmpeqd_Vx_Wx, "pcmpeqd");
4304 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
4305}
4306
4307/* Opcode 0xf3 0x0f 0x76 - invalid */
4308/* Opcode 0xf2 0x0f 0x76 - invalid */
4309
4310
4311/** Opcode 0x0f 0x77 - emms (vex has vzeroall and vzeroupper here) */
4312FNIEMOP_STUB(iemOp_emms);
4313/* Opcode 0x66 0x0f 0x77 - invalid */
4314/* Opcode 0xf3 0x0f 0x77 - invalid */
4315/* Opcode 0xf2 0x0f 0x77 - invalid */
4316
4317/** Opcode 0x0f 0x78 - VMREAD Ey, Gy */
4318FNIEMOP_STUB(iemOp_vmread_Ey_Gy);
4319/** Opcode 0x66 0x0f 0x78 - AMD Group 17 */
4320FNIEMOP_STUB(iemOp_AmdGrp17);
4321/* Opcode 0xf3 0x0f 0x78 - invalid */
4322/* Opcode 0xf2 0x0f 0x78 - invalid */
4323
4324/** Opcode 0x0f 0x79 - VMWRITE Gy, Ey */
4325FNIEMOP_STUB(iemOp_vmwrite_Gy_Ey);
4326/* Opcode 0x66 0x0f 0x79 - invalid */
4327/* Opcode 0xf3 0x0f 0x79 - invalid */
4328/* Opcode 0xf2 0x0f 0x79 - invalid */
4329
4330/* Opcode 0x0f 0x7a - invalid */
4331/* Opcode 0x66 0x0f 0x7a - invalid */
4332/* Opcode 0xf3 0x0f 0x7a - invalid */
4333/* Opcode 0xf2 0x0f 0x7a - invalid */
4334
4335/* Opcode 0x0f 0x7b - invalid */
4336/* Opcode 0x66 0x0f 0x7b - invalid */
4337/* Opcode 0xf3 0x0f 0x7b - invalid */
4338/* Opcode 0xf2 0x0f 0x7b - invalid */
4339
4340/* Opcode 0x0f 0x7c - invalid */
4341/** Opcode 0x66 0x0f 0x7c - haddpd Vpd, Wpd */
4342FNIEMOP_STUB(iemOp_haddpd_Vpd_Wpd);
4343/* Opcode 0xf3 0x0f 0x7c - invalid */
4344/** Opcode 0xf2 0x0f 0x7c - haddps Vps, Wps */
4345FNIEMOP_STUB(iemOp_haddps_Vps_Wps);
4346
4347/* Opcode 0x0f 0x7d - invalid */
4348/** Opcode 0x66 0x0f 0x7d - hsubpd Vpd, Wpd */
4349FNIEMOP_STUB(iemOp_hsubpd_Vpd_Wpd);
4350/* Opcode 0xf3 0x0f 0x7d - invalid */
4351/** Opcode 0xf2 0x0f 0x7d - hsubps Vps, Wps */
4352FNIEMOP_STUB(iemOp_hsubps_Vps_Wps);
4353
4354
4355/** Opcode 0x0f 0x7e - movd_q Ey, Pd */
4356FNIEMOP_DEF(iemOp_movd_q_Ey_Pd)
4357{
4358 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4359 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4360 IEMOP_MNEMONIC(movq_Eq_Pq, "movq Eq,Pq");
4361 else
4362 IEMOP_MNEMONIC(movd_Ed_Pd, "movd Ed,Pd");
4363 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4364 {
4365 /* greg, MMX */
4366 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4367 IEM_MC_BEGIN(0, 1);
4368 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4369 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
4370 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4371 {
4372 IEM_MC_LOCAL(uint64_t, u64Tmp);
4373 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4374 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
4375 }
4376 else
4377 {
4378 IEM_MC_LOCAL(uint32_t, u32Tmp);
4379 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4380 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
4381 }
4382 IEM_MC_ADVANCE_RIP();
4383 IEM_MC_END();
4384 }
4385 else
4386 {
4387 /* [mem], MMX */
4388 IEM_MC_BEGIN(0, 2);
4389 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4390 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4391        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4392 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4393 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
4394 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4395 {
4396 IEM_MC_LOCAL(uint64_t, u64Tmp);
4397 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4398 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
4399 }
4400 else
4401 {
4402 IEM_MC_LOCAL(uint32_t, u32Tmp);
4403 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4404 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
4405 }
4406 IEM_MC_ADVANCE_RIP();
4407 IEM_MC_END();
4408 }
4409 return VINF_SUCCESS;
4410}
4411
4412/** Opcode 0x66 0x0f 0x7e - movd_q Ey, Vy */
4413FNIEMOP_DEF(iemOp_movd_q_Ey_Vy)
4414{
4415 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4416 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4417 IEMOP_MNEMONIC(movq_Eq_Wq, "movq Eq,Wq");
4418 else
4419 IEMOP_MNEMONIC(movd_Ed_Wd, "movd Ed,Wd");
4420 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4421 {
4422 /* greg, XMM */
4423 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4424 IEM_MC_BEGIN(0, 1);
4425 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4426 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4427 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4428 {
4429 IEM_MC_LOCAL(uint64_t, u64Tmp);
4430 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4431 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
4432 }
4433 else
4434 {
4435 IEM_MC_LOCAL(uint32_t, u32Tmp);
4436 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4437 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
4438 }
4439 IEM_MC_ADVANCE_RIP();
4440 IEM_MC_END();
4441 }
4442 else
4443 {
4444 /* [mem], XMM */
4445 IEM_MC_BEGIN(0, 2);
4446 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4447 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4448        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4449 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4450 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4451 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4452 {
4453 IEM_MC_LOCAL(uint64_t, u64Tmp);
4454 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4455 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
4456 }
4457 else
4458 {
4459 IEM_MC_LOCAL(uint32_t, u32Tmp);
4460 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4461 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
4462 }
4463 IEM_MC_ADVANCE_RIP();
4464 IEM_MC_END();
4465 }
4466 return VINF_SUCCESS;
4467}
4468
4469
4470/**
4471 * @opcode 0x7e
4472 * @opcodesub !11 mr/reg
4473 * @oppfx 0xf3
4474 * @opcpuid sse2
4475 * @opgroup og_sse2_pcksclr_datamove
4476 * @opxcpttype 5
4477 * @optest op1=1 op2=2 -> op1=2
4478 * @optest op1=0 op2=-42 -> op1=-42
4479 */
4480FNIEMOP_DEF(iemOp_movq_Vq_Wq)
4481{
4482 IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Wq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
4483 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4484 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4485 {
4486 /*
4487 * Register, register.
4488 */
4489 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4490 IEM_MC_BEGIN(0, 2);
4491 IEM_MC_LOCAL(uint64_t, uSrc);
4492
4493 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4494 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4495
4496 IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4497 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
4498
4499 IEM_MC_ADVANCE_RIP();
4500 IEM_MC_END();
4501 }
4502 else
4503 {
4504 /*
4505         * Register, memory.
4506 */
4507 IEM_MC_BEGIN(0, 2);
4508 IEM_MC_LOCAL(uint64_t, uSrc);
4509 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4510
4511 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4512 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4513 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4514 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4515
4516 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4517 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
4518
4519 IEM_MC_ADVANCE_RIP();
4520 IEM_MC_END();
4521 }
4522 return VINF_SUCCESS;
4523}
4524
4525/* Opcode 0xf2 0x0f 0x7e - invalid */
4526
4527
4528/** Opcode 0x0f 0x7f - movq Qq, Pq */
4529FNIEMOP_DEF(iemOp_movq_Qq_Pq)
4530{
4531 IEMOP_MNEMONIC(movq_Qq_Pq, "movq Qq,Pq");
4532 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4533 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4534 {
4535 /*
4536 * Register, register.
4537 */
4538 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
4539 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
4540 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4541 IEM_MC_BEGIN(0, 1);
4542 IEM_MC_LOCAL(uint64_t, u64Tmp);
4543 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4544 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4545 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4546 IEM_MC_STORE_MREG_U64(bRm & X86_MODRM_RM_MASK, u64Tmp);
4547 IEM_MC_ADVANCE_RIP();
4548 IEM_MC_END();
4549 }
4550 else
4551 {
4552 /*
4553 * Register, memory.
4554 */
4555 IEM_MC_BEGIN(0, 2);
4556 IEM_MC_LOCAL(uint64_t, u64Tmp);
4557 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4558
4559 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4560 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4561 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4562 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
4563
4564 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4565 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
4566
4567 IEM_MC_ADVANCE_RIP();
4568 IEM_MC_END();
4569 }
4570 return VINF_SUCCESS;
4571}
4572
4573/** Opcode 0x66 0x0f 0x7f - movdqa Wx,Vx */
4574FNIEMOP_DEF(iemOp_movdqa_Wx_Vx)
4575{
4576 IEMOP_MNEMONIC(movdqa_Wdq_Vdq, "movdqa Wx,Vx");
4577 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4578 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4579 {
4580 /*
4581 * Register, register.
4582 */
4583 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4584 IEM_MC_BEGIN(0, 0);
4585 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4586 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4587 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
4588 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4589 IEM_MC_ADVANCE_RIP();
4590 IEM_MC_END();
4591 }
4592 else
4593 {
4594 /*
4595 * Register, memory.
4596 */
4597 IEM_MC_BEGIN(0, 2);
4598 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4599 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4600
4601 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4602 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4603 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4604 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4605
4606 IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4607 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
4608
4609 IEM_MC_ADVANCE_RIP();
4610 IEM_MC_END();
4611 }
4612 return VINF_SUCCESS;
4613}
4614
4615/** Opcode 0xf3 0x0f 0x7f - movdqu Wx,Vx */
4616FNIEMOP_DEF(iemOp_movdqu_Wx_Vx)
4617{
4618 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4619 IEMOP_MNEMONIC(movdqu_Wdq_Vdq, "movdqu Wx,Vx");
4620 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4621 {
4622 /*
4623 * Register, register.
4624 */
4625 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4626 IEM_MC_BEGIN(0, 0);
4627 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4628 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4629 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
4630 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4631 IEM_MC_ADVANCE_RIP();
4632 IEM_MC_END();
4633 }
4634 else
4635 {
4636 /*
4637 * Register, memory.
4638 */
4639 IEM_MC_BEGIN(0, 2);
4640 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4641 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4642
4643 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4644 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4645 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4646 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4647
4648 IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4649 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
4650
4651 IEM_MC_ADVANCE_RIP();
4652 IEM_MC_END();
4653 }
4654 return VINF_SUCCESS;
4655}
4656
4657/* Opcode 0xf2 0x0f 0x7f - invalid */
4658
4659
4660
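/*
 * The long-form Jcc instructions (0x0f 0x80 thru 0x0f 0x8f) below all follow
 * the same pattern: fetch a signed 16-bit or 32-bit displacement according to
 * the effective operand size (defaulting to 64-bit handling in long mode via
 * IEMOP_HLP_DEFAULT_64BIT_OP_SIZE, where the 32-bit immediate path covers the
 * 64-bit case too), then either take the relative jump or advance RIP
 * depending on the EFLAGS condition.
 */
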
4661/** Opcode 0x0f 0x80. */
4662FNIEMOP_DEF(iemOp_jo_Jv)
4663{
4664 IEMOP_MNEMONIC(jo_Jv, "jo Jv");
4665 IEMOP_HLP_MIN_386();
4666 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4667 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4668 {
4669 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4670 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4671
4672 IEM_MC_BEGIN(0, 0);
4673 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4674 IEM_MC_REL_JMP_S16(i16Imm);
4675 } IEM_MC_ELSE() {
4676 IEM_MC_ADVANCE_RIP();
4677 } IEM_MC_ENDIF();
4678 IEM_MC_END();
4679 }
4680 else
4681 {
4682 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4683 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4684
4685 IEM_MC_BEGIN(0, 0);
4686 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4687 IEM_MC_REL_JMP_S32(i32Imm);
4688 } IEM_MC_ELSE() {
4689 IEM_MC_ADVANCE_RIP();
4690 } IEM_MC_ENDIF();
4691 IEM_MC_END();
4692 }
4693 return VINF_SUCCESS;
4694}
4695
4696
4697/** Opcode 0x0f 0x81. */
4698FNIEMOP_DEF(iemOp_jno_Jv)
4699{
4700 IEMOP_MNEMONIC(jno_Jv, "jno Jv");
4701 IEMOP_HLP_MIN_386();
4702 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4703 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4704 {
4705 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4706 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4707
4708 IEM_MC_BEGIN(0, 0);
4709 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4710 IEM_MC_ADVANCE_RIP();
4711 } IEM_MC_ELSE() {
4712 IEM_MC_REL_JMP_S16(i16Imm);
4713 } IEM_MC_ENDIF();
4714 IEM_MC_END();
4715 }
4716 else
4717 {
4718 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4719 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4720
4721 IEM_MC_BEGIN(0, 0);
4722 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4723 IEM_MC_ADVANCE_RIP();
4724 } IEM_MC_ELSE() {
4725 IEM_MC_REL_JMP_S32(i32Imm);
4726 } IEM_MC_ENDIF();
4727 IEM_MC_END();
4728 }
4729 return VINF_SUCCESS;
4730}
4731
4732
4733/** Opcode 0x0f 0x82. */
4734FNIEMOP_DEF(iemOp_jc_Jv)
4735{
4736 IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
4737 IEMOP_HLP_MIN_386();
4738 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4739 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4740 {
4741 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4742 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4743
4744 IEM_MC_BEGIN(0, 0);
4745 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4746 IEM_MC_REL_JMP_S16(i16Imm);
4747 } IEM_MC_ELSE() {
4748 IEM_MC_ADVANCE_RIP();
4749 } IEM_MC_ENDIF();
4750 IEM_MC_END();
4751 }
4752 else
4753 {
4754 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4755 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4756
4757 IEM_MC_BEGIN(0, 0);
4758 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4759 IEM_MC_REL_JMP_S32(i32Imm);
4760 } IEM_MC_ELSE() {
4761 IEM_MC_ADVANCE_RIP();
4762 } IEM_MC_ENDIF();
4763 IEM_MC_END();
4764 }
4765 return VINF_SUCCESS;
4766}
4767
4768
4769/** Opcode 0x0f 0x83. */
4770FNIEMOP_DEF(iemOp_jnc_Jv)
4771{
4772 IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
4773 IEMOP_HLP_MIN_386();
4774 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4775 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4776 {
4777 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4778 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4779
4780 IEM_MC_BEGIN(0, 0);
4781 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4782 IEM_MC_ADVANCE_RIP();
4783 } IEM_MC_ELSE() {
4784 IEM_MC_REL_JMP_S16(i16Imm);
4785 } IEM_MC_ENDIF();
4786 IEM_MC_END();
4787 }
4788 else
4789 {
4790 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4791 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4792
4793 IEM_MC_BEGIN(0, 0);
4794 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4795 IEM_MC_ADVANCE_RIP();
4796 } IEM_MC_ELSE() {
4797 IEM_MC_REL_JMP_S32(i32Imm);
4798 } IEM_MC_ENDIF();
4799 IEM_MC_END();
4800 }
4801 return VINF_SUCCESS;
4802}
4803
4804
4805/** Opcode 0x0f 0x84. */
4806FNIEMOP_DEF(iemOp_je_Jv)
4807{
4808 IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
4809 IEMOP_HLP_MIN_386();
4810 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4811 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4812 {
4813 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4814 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4815
4816 IEM_MC_BEGIN(0, 0);
4817 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4818 IEM_MC_REL_JMP_S16(i16Imm);
4819 } IEM_MC_ELSE() {
4820 IEM_MC_ADVANCE_RIP();
4821 } IEM_MC_ENDIF();
4822 IEM_MC_END();
4823 }
4824 else
4825 {
4826 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4827 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4828
4829 IEM_MC_BEGIN(0, 0);
4830 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4831 IEM_MC_REL_JMP_S32(i32Imm);
4832 } IEM_MC_ELSE() {
4833 IEM_MC_ADVANCE_RIP();
4834 } IEM_MC_ENDIF();
4835 IEM_MC_END();
4836 }
4837 return VINF_SUCCESS;
4838}
4839
4840
4841/** Opcode 0x0f 0x85. */
4842FNIEMOP_DEF(iemOp_jne_Jv)
4843{
4844 IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
4845 IEMOP_HLP_MIN_386();
4846 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4847 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4848 {
4849 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4850 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4851
4852 IEM_MC_BEGIN(0, 0);
4853 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4854 IEM_MC_ADVANCE_RIP();
4855 } IEM_MC_ELSE() {
4856 IEM_MC_REL_JMP_S16(i16Imm);
4857 } IEM_MC_ENDIF();
4858 IEM_MC_END();
4859 }
4860 else
4861 {
4862 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4863 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4864
4865 IEM_MC_BEGIN(0, 0);
4866 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4867 IEM_MC_ADVANCE_RIP();
4868 } IEM_MC_ELSE() {
4869 IEM_MC_REL_JMP_S32(i32Imm);
4870 } IEM_MC_ENDIF();
4871 IEM_MC_END();
4872 }
4873 return VINF_SUCCESS;
4874}
4875
4876
4877/** Opcode 0x0f 0x86. */
4878FNIEMOP_DEF(iemOp_jbe_Jv)
4879{
4880 IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
4881 IEMOP_HLP_MIN_386();
4882 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4883 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4884 {
4885 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4886 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4887
4888 IEM_MC_BEGIN(0, 0);
4889 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4890 IEM_MC_REL_JMP_S16(i16Imm);
4891 } IEM_MC_ELSE() {
4892 IEM_MC_ADVANCE_RIP();
4893 } IEM_MC_ENDIF();
4894 IEM_MC_END();
4895 }
4896 else
4897 {
4898 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4899 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4900
4901 IEM_MC_BEGIN(0, 0);
4902 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4903 IEM_MC_REL_JMP_S32(i32Imm);
4904 } IEM_MC_ELSE() {
4905 IEM_MC_ADVANCE_RIP();
4906 } IEM_MC_ENDIF();
4907 IEM_MC_END();
4908 }
4909 return VINF_SUCCESS;
4910}
4911
4912
4913/** Opcode 0x0f 0x87. */
4914FNIEMOP_DEF(iemOp_jnbe_Jv)
4915{
4916 IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
4917 IEMOP_HLP_MIN_386();
4918 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4919 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4920 {
4921 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4922 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4923
4924 IEM_MC_BEGIN(0, 0);
4925 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4926 IEM_MC_ADVANCE_RIP();
4927 } IEM_MC_ELSE() {
4928 IEM_MC_REL_JMP_S16(i16Imm);
4929 } IEM_MC_ENDIF();
4930 IEM_MC_END();
4931 }
4932 else
4933 {
4934 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4935 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4936
4937 IEM_MC_BEGIN(0, 0);
4938 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4939 IEM_MC_ADVANCE_RIP();
4940 } IEM_MC_ELSE() {
4941 IEM_MC_REL_JMP_S32(i32Imm);
4942 } IEM_MC_ENDIF();
4943 IEM_MC_END();
4944 }
4945 return VINF_SUCCESS;
4946}
4947
4948
4949/** Opcode 0x0f 0x88. */
4950FNIEMOP_DEF(iemOp_js_Jv)
4951{
4952 IEMOP_MNEMONIC(js_Jv, "js Jv");
4953 IEMOP_HLP_MIN_386();
4954 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4955 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4956 {
4957 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4958 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4959
4960 IEM_MC_BEGIN(0, 0);
4961 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4962 IEM_MC_REL_JMP_S16(i16Imm);
4963 } IEM_MC_ELSE() {
4964 IEM_MC_ADVANCE_RIP();
4965 } IEM_MC_ENDIF();
4966 IEM_MC_END();
4967 }
4968 else
4969 {
4970 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4971 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4972
4973 IEM_MC_BEGIN(0, 0);
4974 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4975 IEM_MC_REL_JMP_S32(i32Imm);
4976 } IEM_MC_ELSE() {
4977 IEM_MC_ADVANCE_RIP();
4978 } IEM_MC_ENDIF();
4979 IEM_MC_END();
4980 }
4981 return VINF_SUCCESS;
4982}
4983
4984
4985/** Opcode 0x0f 0x89. */
4986FNIEMOP_DEF(iemOp_jns_Jv)
4987{
4988 IEMOP_MNEMONIC(jns_Jv, "jns Jv");
4989 IEMOP_HLP_MIN_386();
4990 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4991 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4992 {
4993 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4994 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4995
4996 IEM_MC_BEGIN(0, 0);
4997 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4998 IEM_MC_ADVANCE_RIP();
4999 } IEM_MC_ELSE() {
5000 IEM_MC_REL_JMP_S16(i16Imm);
5001 } IEM_MC_ENDIF();
5002 IEM_MC_END();
5003 }
5004 else
5005 {
5006 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5007 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5008
5009 IEM_MC_BEGIN(0, 0);
5010 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5011 IEM_MC_ADVANCE_RIP();
5012 } IEM_MC_ELSE() {
5013 IEM_MC_REL_JMP_S32(i32Imm);
5014 } IEM_MC_ENDIF();
5015 IEM_MC_END();
5016 }
5017 return VINF_SUCCESS;
5018}
5019
5020
5021/** Opcode 0x0f 0x8a. */
5022FNIEMOP_DEF(iemOp_jp_Jv)
5023{
5024 IEMOP_MNEMONIC(jp_Jv, "jp Jv");
5025 IEMOP_HLP_MIN_386();
5026 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5027 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5028 {
5029 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5030 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5031
5032 IEM_MC_BEGIN(0, 0);
5033 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5034 IEM_MC_REL_JMP_S16(i16Imm);
5035 } IEM_MC_ELSE() {
5036 IEM_MC_ADVANCE_RIP();
5037 } IEM_MC_ENDIF();
5038 IEM_MC_END();
5039 }
5040 else
5041 {
5042 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5043 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5044
5045 IEM_MC_BEGIN(0, 0);
5046 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5047 IEM_MC_REL_JMP_S32(i32Imm);
5048 } IEM_MC_ELSE() {
5049 IEM_MC_ADVANCE_RIP();
5050 } IEM_MC_ENDIF();
5051 IEM_MC_END();
5052 }
5053 return VINF_SUCCESS;
5054}
5055
5056
5057/** Opcode 0x0f 0x8b. */
5058FNIEMOP_DEF(iemOp_jnp_Jv)
5059{
5060 IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
5061 IEMOP_HLP_MIN_386();
5062 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5063 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5064 {
5065 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5066 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5067
5068 IEM_MC_BEGIN(0, 0);
5069 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5070 IEM_MC_ADVANCE_RIP();
5071 } IEM_MC_ELSE() {
5072 IEM_MC_REL_JMP_S16(i16Imm);
5073 } IEM_MC_ENDIF();
5074 IEM_MC_END();
5075 }
5076 else
5077 {
5078 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5079 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5080
5081 IEM_MC_BEGIN(0, 0);
5082 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5083 IEM_MC_ADVANCE_RIP();
5084 } IEM_MC_ELSE() {
5085 IEM_MC_REL_JMP_S32(i32Imm);
5086 } IEM_MC_ENDIF();
5087 IEM_MC_END();
5088 }
5089 return VINF_SUCCESS;
5090}
5091
5092
5093/** Opcode 0x0f 0x8c. */
5094FNIEMOP_DEF(iemOp_jl_Jv)
5095{
5096 IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
5097 IEMOP_HLP_MIN_386();
5098 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5099 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5100 {
5101 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5102 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5103
5104 IEM_MC_BEGIN(0, 0);
5105 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5106 IEM_MC_REL_JMP_S16(i16Imm);
5107 } IEM_MC_ELSE() {
5108 IEM_MC_ADVANCE_RIP();
5109 } IEM_MC_ENDIF();
5110 IEM_MC_END();
5111 }
5112 else
5113 {
5114 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5115 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5116
5117 IEM_MC_BEGIN(0, 0);
5118 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5119 IEM_MC_REL_JMP_S32(i32Imm);
5120 } IEM_MC_ELSE() {
5121 IEM_MC_ADVANCE_RIP();
5122 } IEM_MC_ENDIF();
5123 IEM_MC_END();
5124 }
5125 return VINF_SUCCESS;
5126}
5127
5128
5129/** Opcode 0x0f 0x8d. */
5130FNIEMOP_DEF(iemOp_jnl_Jv)
5131{
5132 IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
5133 IEMOP_HLP_MIN_386();
5134 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5135 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5136 {
5137 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5138 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5139
5140 IEM_MC_BEGIN(0, 0);
5141 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5142 IEM_MC_ADVANCE_RIP();
5143 } IEM_MC_ELSE() {
5144 IEM_MC_REL_JMP_S16(i16Imm);
5145 } IEM_MC_ENDIF();
5146 IEM_MC_END();
5147 }
5148 else
5149 {
5150 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5151 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5152
5153 IEM_MC_BEGIN(0, 0);
5154 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5155 IEM_MC_ADVANCE_RIP();
5156 } IEM_MC_ELSE() {
5157 IEM_MC_REL_JMP_S32(i32Imm);
5158 } IEM_MC_ENDIF();
5159 IEM_MC_END();
5160 }
5161 return VINF_SUCCESS;
5162}
5163
5164
5165/** Opcode 0x0f 0x8e. */
5166FNIEMOP_DEF(iemOp_jle_Jv)
5167{
5168 IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
5169 IEMOP_HLP_MIN_386();
5170 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5171 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5172 {
5173 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5174 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5175
5176 IEM_MC_BEGIN(0, 0);
5177 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5178 IEM_MC_REL_JMP_S16(i16Imm);
5179 } IEM_MC_ELSE() {
5180 IEM_MC_ADVANCE_RIP();
5181 } IEM_MC_ENDIF();
5182 IEM_MC_END();
5183 }
5184 else
5185 {
5186 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5187 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5188
5189 IEM_MC_BEGIN(0, 0);
5190 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5191 IEM_MC_REL_JMP_S32(i32Imm);
5192 } IEM_MC_ELSE() {
5193 IEM_MC_ADVANCE_RIP();
5194 } IEM_MC_ENDIF();
5195 IEM_MC_END();
5196 }
5197 return VINF_SUCCESS;
5198}
5199
5200
5201/** Opcode 0x0f 0x8f. */
5202FNIEMOP_DEF(iemOp_jnle_Jv)
5203{
5204 IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
5205 IEMOP_HLP_MIN_386();
5206 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5207 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5208 {
5209 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5210 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5211
5212 IEM_MC_BEGIN(0, 0);
5213 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5214 IEM_MC_ADVANCE_RIP();
5215 } IEM_MC_ELSE() {
5216 IEM_MC_REL_JMP_S16(i16Imm);
5217 } IEM_MC_ENDIF();
5218 IEM_MC_END();
5219 }
5220 else
5221 {
5222 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5223 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5224
5225 IEM_MC_BEGIN(0, 0);
5226 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5227 IEM_MC_ADVANCE_RIP();
5228 } IEM_MC_ELSE() {
5229 IEM_MC_REL_JMP_S32(i32Imm);
5230 } IEM_MC_ENDIF();
5231 IEM_MC_END();
5232 }
5233 return VINF_SUCCESS;
5234}
5235
5236
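/*
 * The SETcc instructions (0x0f 0x90 and up) below all follow the same
 * pattern: store 1 or 0 in the byte-sized register or memory operand
 * according to the EFLAGS condition.  The ModR/M reg field is documented
 * as unused and is currently ignored, see the @todo note on each.
 */
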
5237/** Opcode 0x0f 0x90. */
5238FNIEMOP_DEF(iemOp_seto_Eb)
5239{
5240 IEMOP_MNEMONIC(seto_Eb, "seto Eb");
5241 IEMOP_HLP_MIN_386();
5242 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5243
5244 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5245 * any way. AMD says it's "unused", whatever that means. We're
5246 * ignoring for now. */
5247 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5248 {
5249 /* register target */
5250 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5251 IEM_MC_BEGIN(0, 0);
5252 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5253 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5254 } IEM_MC_ELSE() {
5255 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5256 } IEM_MC_ENDIF();
5257 IEM_MC_ADVANCE_RIP();
5258 IEM_MC_END();
5259 }
5260 else
5261 {
5262 /* memory target */
5263 IEM_MC_BEGIN(0, 1);
5264 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5265 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5266 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5267 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5268 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5269 } IEM_MC_ELSE() {
5270 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5271 } IEM_MC_ENDIF();
5272 IEM_MC_ADVANCE_RIP();
5273 IEM_MC_END();
5274 }
5275 return VINF_SUCCESS;
5276}
5277
5278
5279/** Opcode 0x0f 0x91. */
5280FNIEMOP_DEF(iemOp_setno_Eb)
5281{
5282 IEMOP_MNEMONIC(setno_Eb, "setno Eb");
5283 IEMOP_HLP_MIN_386();
5284 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5285
5286 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5287 * any way. AMD says it's "unused", whatever that means. We're
5288 * ignoring for now. */
5289 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5290 {
5291 /* register target */
5292 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5293 IEM_MC_BEGIN(0, 0);
5294 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5295 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5296 } IEM_MC_ELSE() {
5297 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5298 } IEM_MC_ENDIF();
5299 IEM_MC_ADVANCE_RIP();
5300 IEM_MC_END();
5301 }
5302 else
5303 {
5304 /* memory target */
5305 IEM_MC_BEGIN(0, 1);
5306 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5307 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5308 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5309 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5310 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5311 } IEM_MC_ELSE() {
5312 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5313 } IEM_MC_ENDIF();
5314 IEM_MC_ADVANCE_RIP();
5315 IEM_MC_END();
5316 }
5317 return VINF_SUCCESS;
5318}
5319
5320
5321/** Opcode 0x0f 0x92. */
5322FNIEMOP_DEF(iemOp_setc_Eb)
5323{
5324 IEMOP_MNEMONIC(setc_Eb, "setc Eb");
5325 IEMOP_HLP_MIN_386();
5326 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5327
5328 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5329 * any way. AMD says it's "unused", whatever that means. We're
5330 * ignoring for now. */
5331 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5332 {
5333 /* register target */
5334 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5335 IEM_MC_BEGIN(0, 0);
5336 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5337 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5338 } IEM_MC_ELSE() {
5339 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5340 } IEM_MC_ENDIF();
5341 IEM_MC_ADVANCE_RIP();
5342 IEM_MC_END();
5343 }
5344 else
5345 {
5346 /* memory target */
5347 IEM_MC_BEGIN(0, 1);
5348 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5349 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5350 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5351 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5352 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5353 } IEM_MC_ELSE() {
5354 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5355 } IEM_MC_ENDIF();
5356 IEM_MC_ADVANCE_RIP();
5357 IEM_MC_END();
5358 }
5359 return VINF_SUCCESS;
5360}
5361
5362
5363/** Opcode 0x0f 0x93. */
5364FNIEMOP_DEF(iemOp_setnc_Eb)
5365{
5366 IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
5367 IEMOP_HLP_MIN_386();
5368 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5369
5370 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5371 * any way. AMD says it's "unused", whatever that means. We're
5372 * ignoring for now. */
5373 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5374 {
5375 /* register target */
5376 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5377 IEM_MC_BEGIN(0, 0);
5378 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5379 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5380 } IEM_MC_ELSE() {
5381 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5382 } IEM_MC_ENDIF();
5383 IEM_MC_ADVANCE_RIP();
5384 IEM_MC_END();
5385 }
5386 else
5387 {
5388 /* memory target */
5389 IEM_MC_BEGIN(0, 1);
5390 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5391 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5392 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5393 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5394 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5395 } IEM_MC_ELSE() {
5396 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5397 } IEM_MC_ENDIF();
5398 IEM_MC_ADVANCE_RIP();
5399 IEM_MC_END();
5400 }
5401 return VINF_SUCCESS;
5402}
5403
5404
5405/** Opcode 0x0f 0x94. */
5406FNIEMOP_DEF(iemOp_sete_Eb)
5407{
5408 IEMOP_MNEMONIC(sete_Eb, "sete Eb");
5409 IEMOP_HLP_MIN_386();
5410 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5411
5412 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5413 * any way. AMD says it's "unused", whatever that means. We're
5414 * ignoring for now. */
5415 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5416 {
5417 /* register target */
5418 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5419 IEM_MC_BEGIN(0, 0);
5420 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5421 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5422 } IEM_MC_ELSE() {
5423 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5424 } IEM_MC_ENDIF();
5425 IEM_MC_ADVANCE_RIP();
5426 IEM_MC_END();
5427 }
5428 else
5429 {
5430 /* memory target */
5431 IEM_MC_BEGIN(0, 1);
5432 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5433 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5434 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5435 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5436 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5437 } IEM_MC_ELSE() {
5438 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5439 } IEM_MC_ENDIF();
5440 IEM_MC_ADVANCE_RIP();
5441 IEM_MC_END();
5442 }
5443 return VINF_SUCCESS;
5444}
5445
5446
5447/** Opcode 0x0f 0x95. */
5448FNIEMOP_DEF(iemOp_setne_Eb)
5449{
5450 IEMOP_MNEMONIC(setne_Eb, "setne Eb");
5451 IEMOP_HLP_MIN_386();
5452 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5453
5454 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5455 * any way. AMD says it's "unused", whatever that means. We're
5456 * ignoring for now. */
5457 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5458 {
5459 /* register target */
5460 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5461 IEM_MC_BEGIN(0, 0);
5462 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5463 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5464 } IEM_MC_ELSE() {
5465 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5466 } IEM_MC_ENDIF();
5467 IEM_MC_ADVANCE_RIP();
5468 IEM_MC_END();
5469 }
5470 else
5471 {
5472 /* memory target */
5473 IEM_MC_BEGIN(0, 1);
5474 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5475 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5476 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5477 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5478 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5479 } IEM_MC_ELSE() {
5480 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5481 } IEM_MC_ENDIF();
5482 IEM_MC_ADVANCE_RIP();
5483 IEM_MC_END();
5484 }
5485 return VINF_SUCCESS;
5486}
5487
5488
5489/** Opcode 0x0f 0x96. */
5490FNIEMOP_DEF(iemOp_setbe_Eb)
5491{
5492 IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
5493 IEMOP_HLP_MIN_386();
5494 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5495
5496 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5497 * any way. AMD says it's "unused", whatever that means. We're
5498 * ignoring for now. */
5499 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5500 {
5501 /* register target */
5502 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5503 IEM_MC_BEGIN(0, 0);
5504 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5505 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5506 } IEM_MC_ELSE() {
5507 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5508 } IEM_MC_ENDIF();
5509 IEM_MC_ADVANCE_RIP();
5510 IEM_MC_END();
5511 }
5512 else
5513 {
5514 /* memory target */
5515 IEM_MC_BEGIN(0, 1);
5516 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5517 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5518 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5519 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5520 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5521 } IEM_MC_ELSE() {
5522 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5523 } IEM_MC_ENDIF();
5524 IEM_MC_ADVANCE_RIP();
5525 IEM_MC_END();
5526 }
5527 return VINF_SUCCESS;
5528}
5529
5530
5531/** Opcode 0x0f 0x97. */
5532FNIEMOP_DEF(iemOp_setnbe_Eb)
5533{
5534 IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
5535 IEMOP_HLP_MIN_386();
5536 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5537
5538 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5539 * any way. AMD says it's "unused", whatever that means. We're
5540     *        ignoring it for now. */
5541 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5542 {
5543 /* register target */
5544 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5545 IEM_MC_BEGIN(0, 0);
5546 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5547 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5548 } IEM_MC_ELSE() {
5549 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5550 } IEM_MC_ENDIF();
5551 IEM_MC_ADVANCE_RIP();
5552 IEM_MC_END();
5553 }
5554 else
5555 {
5556 /* memory target */
5557 IEM_MC_BEGIN(0, 1);
5558 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5559 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5560 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5561 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
5562 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5563 } IEM_MC_ELSE() {
5564 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5565 } IEM_MC_ENDIF();
5566 IEM_MC_ADVANCE_RIP();
5567 IEM_MC_END();
5568 }
5569 return VINF_SUCCESS;
5570}
5571
5572
5573/** Opcode 0x0f 0x98. */
5574FNIEMOP_DEF(iemOp_sets_Eb)
5575{
5576 IEMOP_MNEMONIC(sets_Eb, "sets Eb");
5577 IEMOP_HLP_MIN_386();
5578 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5579
5580 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5581 * any way. AMD says it's "unused", whatever that means. We're
5582     *        ignoring it for now. */
5583 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5584 {
5585 /* register target */
5586 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5587 IEM_MC_BEGIN(0, 0);
5588 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5589 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5590 } IEM_MC_ELSE() {
5591 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5592 } IEM_MC_ENDIF();
5593 IEM_MC_ADVANCE_RIP();
5594 IEM_MC_END();
5595 }
5596 else
5597 {
5598 /* memory target */
5599 IEM_MC_BEGIN(0, 1);
5600 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5601 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5602 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5603 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5604 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5605 } IEM_MC_ELSE() {
5606 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5607 } IEM_MC_ENDIF();
5608 IEM_MC_ADVANCE_RIP();
5609 IEM_MC_END();
5610 }
5611 return VINF_SUCCESS;
5612}
5613
5614
5615/** Opcode 0x0f 0x99. */
5616FNIEMOP_DEF(iemOp_setns_Eb)
5617{
5618 IEMOP_MNEMONIC(setns_Eb, "setns Eb");
5619 IEMOP_HLP_MIN_386();
5620 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5621
5622 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5623 * any way. AMD says it's "unused", whatever that means. We're
5624     *        ignoring it for now. */
5625 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5626 {
5627 /* register target */
5628 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5629 IEM_MC_BEGIN(0, 0);
5630 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5631 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5632 } IEM_MC_ELSE() {
5633 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5634 } IEM_MC_ENDIF();
5635 IEM_MC_ADVANCE_RIP();
5636 IEM_MC_END();
5637 }
5638 else
5639 {
5640 /* memory target */
5641 IEM_MC_BEGIN(0, 1);
5642 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5643 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5644 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5645 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
5646 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5647 } IEM_MC_ELSE() {
5648 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5649 } IEM_MC_ENDIF();
5650 IEM_MC_ADVANCE_RIP();
5651 IEM_MC_END();
5652 }
5653 return VINF_SUCCESS;
5654}
5655
5656
5657/** Opcode 0x0f 0x9a. */
5658FNIEMOP_DEF(iemOp_setp_Eb)
5659{
5660 IEMOP_MNEMONIC(setp_Eb, "setp Eb");
5661 IEMOP_HLP_MIN_386();
5662 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5663
5664 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5665 * any way. AMD says it's "unused", whatever that means. We're
5666     *        ignoring it for now. */
5667 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5668 {
5669 /* register target */
5670 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5671 IEM_MC_BEGIN(0, 0);
5672 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5673 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5674 } IEM_MC_ELSE() {
5675 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5676 } IEM_MC_ENDIF();
5677 IEM_MC_ADVANCE_RIP();
5678 IEM_MC_END();
5679 }
5680 else
5681 {
5682 /* memory target */
5683 IEM_MC_BEGIN(0, 1);
5684 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5685 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5686 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5687 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5688 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5689 } IEM_MC_ELSE() {
5690 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5691 } IEM_MC_ENDIF();
5692 IEM_MC_ADVANCE_RIP();
5693 IEM_MC_END();
5694 }
5695 return VINF_SUCCESS;
5696}
5697
5698
5699/** Opcode 0x0f 0x9b. */
5700FNIEMOP_DEF(iemOp_setnp_Eb)
5701{
5702 IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
5703 IEMOP_HLP_MIN_386();
5704 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5705
5706 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5707 * any way. AMD says it's "unused", whatever that means. We're
5708     *        ignoring it for now. */
5709 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5710 {
5711 /* register target */
5712 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5713 IEM_MC_BEGIN(0, 0);
5714 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5715 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5716 } IEM_MC_ELSE() {
5717 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5718 } IEM_MC_ENDIF();
5719 IEM_MC_ADVANCE_RIP();
5720 IEM_MC_END();
5721 }
5722 else
5723 {
5724 /* memory target */
5725 IEM_MC_BEGIN(0, 1);
5726 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5727 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5728 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5729 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
5730 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5731 } IEM_MC_ELSE() {
5732 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5733 } IEM_MC_ENDIF();
5734 IEM_MC_ADVANCE_RIP();
5735 IEM_MC_END();
5736 }
5737 return VINF_SUCCESS;
5738}
5739
5740
5741/** Opcode 0x0f 0x9c. */
5742FNIEMOP_DEF(iemOp_setl_Eb)
5743{
5744 IEMOP_MNEMONIC(setl_Eb, "setl Eb");
5745 IEMOP_HLP_MIN_386();
5746 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5747
5748 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5749 * any way. AMD says it's "unused", whatever that means. We're
5750     *        ignoring it for now. */
5751 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5752 {
5753 /* register target */
5754 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5755 IEM_MC_BEGIN(0, 0);
5756 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5757 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5758 } IEM_MC_ELSE() {
5759 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5760 } IEM_MC_ENDIF();
5761 IEM_MC_ADVANCE_RIP();
5762 IEM_MC_END();
5763 }
5764 else
5765 {
5766 /* memory target */
5767 IEM_MC_BEGIN(0, 1);
5768 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5769 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5770 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5771 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5772 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5773 } IEM_MC_ELSE() {
5774 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5775 } IEM_MC_ENDIF();
5776 IEM_MC_ADVANCE_RIP();
5777 IEM_MC_END();
5778 }
5779 return VINF_SUCCESS;
5780}
5781
5782
5783/** Opcode 0x0f 0x9d. */
5784FNIEMOP_DEF(iemOp_setnl_Eb)
5785{
5786 IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
5787 IEMOP_HLP_MIN_386();
5788 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5789
5790 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5791 * any way. AMD says it's "unused", whatever that means. We're
5792     *        ignoring it for now. */
5793 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5794 {
5795 /* register target */
5796 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5797 IEM_MC_BEGIN(0, 0);
5798 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5799 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5800 } IEM_MC_ELSE() {
5801 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5802 } IEM_MC_ENDIF();
5803 IEM_MC_ADVANCE_RIP();
5804 IEM_MC_END();
5805 }
5806 else
5807 {
5808 /* memory target */
5809 IEM_MC_BEGIN(0, 1);
5810 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5811 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5812 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5813 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5814 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5815 } IEM_MC_ELSE() {
5816 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5817 } IEM_MC_ENDIF();
5818 IEM_MC_ADVANCE_RIP();
5819 IEM_MC_END();
5820 }
5821 return VINF_SUCCESS;
5822}
5823
5824
5825/** Opcode 0x0f 0x9e. */
5826FNIEMOP_DEF(iemOp_setle_Eb)
5827{
5828 IEMOP_MNEMONIC(setle_Eb, "setle Eb");
5829 IEMOP_HLP_MIN_386();
5830 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5831
5832 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5833 * any way. AMD says it's "unused", whatever that means. We're
5834     *        ignoring it for now. */
5835 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5836 {
5837 /* register target */
5838 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5839 IEM_MC_BEGIN(0, 0);
5840 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5841 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5842 } IEM_MC_ELSE() {
5843 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5844 } IEM_MC_ENDIF();
5845 IEM_MC_ADVANCE_RIP();
5846 IEM_MC_END();
5847 }
5848 else
5849 {
5850 /* memory target */
5851 IEM_MC_BEGIN(0, 1);
5852 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5853 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5854 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5855 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5856 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5857 } IEM_MC_ELSE() {
5858 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5859 } IEM_MC_ENDIF();
5860 IEM_MC_ADVANCE_RIP();
5861 IEM_MC_END();
5862 }
5863 return VINF_SUCCESS;
5864}
5865
5866
5867/** Opcode 0x0f 0x9f. */
5868FNIEMOP_DEF(iemOp_setnle_Eb)
5869{
5870 IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
5871 IEMOP_HLP_MIN_386();
5872 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5873
5874 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5875 * any way. AMD says it's "unused", whatever that means. We're
5876     *        ignoring it for now. */
5877 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5878 {
5879 /* register target */
5880 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5881 IEM_MC_BEGIN(0, 0);
5882 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5883 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5884 } IEM_MC_ELSE() {
5885 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5886 } IEM_MC_ENDIF();
5887 IEM_MC_ADVANCE_RIP();
5888 IEM_MC_END();
5889 }
5890 else
5891 {
5892 /* memory target */
5893 IEM_MC_BEGIN(0, 1);
5894 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5895 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5896 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5897 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5898 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5899 } IEM_MC_ELSE() {
5900 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5901 } IEM_MC_ENDIF();
5902 IEM_MC_ADVANCE_RIP();
5903 IEM_MC_END();
5904 }
5905 return VINF_SUCCESS;
5906}
5907
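/*
 * Illustrative sketch (not compiled): the SETcc opcodes 0x90..0x9f above all
 * share this decode shape and differ only in the EFLAGS predicate.  In plain
 * C (names made up; X86_EFL_* as defined by iprt/x86.h) the three
 * non-trivial predicates used above decompose like this:
 */
#if 0
static bool sketchSetbeCond(uint32_t fEFlags) /* 0f 96: below or equal, CF or ZF */
{
    return (fEFlags & (X86_EFL_CF | X86_EFL_ZF)) != 0;
}

static bool sketchSetlCond(uint32_t fEFlags)  /* 0f 9c: less, SF != OF */
{
    return !!(fEFlags & X86_EFL_SF) != !!(fEFlags & X86_EFL_OF);
}

static bool sketchSetleCond(uint32_t fEFlags) /* 0f 9e: less or equal, ZF or SF != OF */
{
    return (fEFlags & X86_EFL_ZF) != 0 || sketchSetlCond(fEFlags);
}
#endif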
5908
5909/**
5910 * Common 'push segment-register' helper.
5911 */
5912FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
5913{
5914 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5915    Assert(iReg >= X86_SREG_FS || pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT);
5916 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5917
5918 switch (pVCpu->iem.s.enmEffOpSize)
5919 {
5920 case IEMMODE_16BIT:
5921 IEM_MC_BEGIN(0, 1);
5922 IEM_MC_LOCAL(uint16_t, u16Value);
5923 IEM_MC_FETCH_SREG_U16(u16Value, iReg);
5924 IEM_MC_PUSH_U16(u16Value);
5925 IEM_MC_ADVANCE_RIP();
5926 IEM_MC_END();
5927 break;
5928
5929 case IEMMODE_32BIT:
5930 IEM_MC_BEGIN(0, 1);
5931 IEM_MC_LOCAL(uint32_t, u32Value);
5932 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
5933 IEM_MC_PUSH_U32_SREG(u32Value);
5934 IEM_MC_ADVANCE_RIP();
5935 IEM_MC_END();
5936 break;
5937
5938 case IEMMODE_64BIT:
5939 IEM_MC_BEGIN(0, 1);
5940 IEM_MC_LOCAL(uint64_t, u64Value);
5941 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
5942 IEM_MC_PUSH_U64(u64Value);
5943 IEM_MC_ADVANCE_RIP();
5944 IEM_MC_END();
5945 break;
5946 }
5947
5948 return VINF_SUCCESS;
5949}
5950
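/*
 * Note on IEM_MC_PUSH_U32_SREG above: pushing a segment register with a
 * 32-bit operand size is special because recent CPUs only do a 16-bit write
 * into the 32-bit stack slot, leaving the upper word untouched.  Illustrative
 * sketch (not compiled, names made up) of that conservative behaviour:
 */
#if 0
static void sketchPushSRegU32(uint16_t uSel, uint8_t *pbStackSlot /* 4 byte slot */)
{
    /* Only the low word of the slot is written; bytes 2 and 3 keep their old value. */
    pbStackSlot[0] = (uint8_t)uSel;
    pbStackSlot[1] = (uint8_t)(uSel >> 8);
}
#endif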
5951
5952/** Opcode 0x0f 0xa0. */
5953FNIEMOP_DEF(iemOp_push_fs)
5954{
5955 IEMOP_MNEMONIC(push_fs, "push fs");
5956 IEMOP_HLP_MIN_386();
5957 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5958 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
5959}
5960
5961
5962/** Opcode 0x0f 0xa1. */
5963FNIEMOP_DEF(iemOp_pop_fs)
5964{
5965 IEMOP_MNEMONIC(pop_fs, "pop fs");
5966 IEMOP_HLP_MIN_386();
5967 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5968 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
5969}
5970
5971
5972/** Opcode 0x0f 0xa2. */
5973FNIEMOP_DEF(iemOp_cpuid)
5974{
5975 IEMOP_MNEMONIC(cpuid, "cpuid");
5976    IEMOP_HLP_MIN_486(); /* not all 486s had CPUID. */
5977 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5978 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
5979}
5980
5981
5982/**
5983 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
5984 * iemOp_bts_Ev_Gv.
5985 */
5986FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
5987{
5988 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5989 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
5990
5991 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5992 {
5993 /* register destination. */
5994 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5995 switch (pVCpu->iem.s.enmEffOpSize)
5996 {
5997 case IEMMODE_16BIT:
5998 IEM_MC_BEGIN(3, 0);
5999 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6000 IEM_MC_ARG(uint16_t, u16Src, 1);
6001 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6002
6003 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6004 IEM_MC_AND_LOCAL_U16(u16Src, 0xf);
6005 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6006 IEM_MC_REF_EFLAGS(pEFlags);
6007 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6008
6009 IEM_MC_ADVANCE_RIP();
6010 IEM_MC_END();
6011 return VINF_SUCCESS;
6012
6013 case IEMMODE_32BIT:
6014 IEM_MC_BEGIN(3, 0);
6015 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6016 IEM_MC_ARG(uint32_t, u32Src, 1);
6017 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6018
6019 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6020 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
6021 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6022 IEM_MC_REF_EFLAGS(pEFlags);
6023 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6024
6025 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6026 IEM_MC_ADVANCE_RIP();
6027 IEM_MC_END();
6028 return VINF_SUCCESS;
6029
6030 case IEMMODE_64BIT:
6031 IEM_MC_BEGIN(3, 0);
6032 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6033 IEM_MC_ARG(uint64_t, u64Src, 1);
6034 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6035
6036 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6037 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
6038 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6039 IEM_MC_REF_EFLAGS(pEFlags);
6040 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6041
6042 IEM_MC_ADVANCE_RIP();
6043 IEM_MC_END();
6044 return VINF_SUCCESS;
6045
6046 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6047 }
6048 }
6049 else
6050 {
6051 /* memory destination. */
6052
6053 uint32_t fAccess;
6054 if (pImpl->pfnLockedU16)
6055 fAccess = IEM_ACCESS_DATA_RW;
6056 else /* BT */
6057 fAccess = IEM_ACCESS_DATA_R;
6058
6059 /** @todo test negative bit offsets! */
6060 switch (pVCpu->iem.s.enmEffOpSize)
6061 {
6062 case IEMMODE_16BIT:
6063 IEM_MC_BEGIN(3, 2);
6064 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6065 IEM_MC_ARG(uint16_t, u16Src, 1);
6066 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6067 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6068 IEM_MC_LOCAL(int16_t, i16AddrAdj);
6069
6070 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6071 if (pImpl->pfnLockedU16)
6072 IEMOP_HLP_DONE_DECODING();
6073 else
6074 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6075 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6076 IEM_MC_ASSIGN(i16AddrAdj, u16Src);
6077 IEM_MC_AND_ARG_U16(u16Src, 0x0f);
6078 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
6079 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1);
6080 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
6081 IEM_MC_FETCH_EFLAGS(EFlags);
6082
6083 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6084 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6085 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6086 else
6087 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
6088 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
6089
6090 IEM_MC_COMMIT_EFLAGS(EFlags);
6091 IEM_MC_ADVANCE_RIP();
6092 IEM_MC_END();
6093 return VINF_SUCCESS;
6094
6095 case IEMMODE_32BIT:
6096 IEM_MC_BEGIN(3, 2);
6097 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6098 IEM_MC_ARG(uint32_t, u32Src, 1);
6099 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6100 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6101 IEM_MC_LOCAL(int32_t, i32AddrAdj);
6102
6103 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6104 if (pImpl->pfnLockedU16)
6105 IEMOP_HLP_DONE_DECODING();
6106 else
6107 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6108 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6109 IEM_MC_ASSIGN(i32AddrAdj, u32Src);
6110 IEM_MC_AND_ARG_U32(u32Src, 0x1f);
6111 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
6112 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
6113 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
6114 IEM_MC_FETCH_EFLAGS(EFlags);
6115
6116 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6117 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6118 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6119 else
6120 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
6121 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
6122
6123 IEM_MC_COMMIT_EFLAGS(EFlags);
6124 IEM_MC_ADVANCE_RIP();
6125 IEM_MC_END();
6126 return VINF_SUCCESS;
6127
6128 case IEMMODE_64BIT:
6129 IEM_MC_BEGIN(3, 2);
6130 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6131 IEM_MC_ARG(uint64_t, u64Src, 1);
6132 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6133 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6134 IEM_MC_LOCAL(int64_t, i64AddrAdj);
6135
6136 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6137 if (pImpl->pfnLockedU16)
6138 IEMOP_HLP_DONE_DECODING();
6139 else
6140 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6141 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6142 IEM_MC_ASSIGN(i64AddrAdj, u64Src);
6143 IEM_MC_AND_ARG_U64(u64Src, 0x3f);
6144 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
6145 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
6146 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
6147 IEM_MC_FETCH_EFLAGS(EFlags);
6148
6149 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6150 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6151 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6152 else
6153 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
6154 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
6155
6156 IEM_MC_COMMIT_EFLAGS(EFlags);
6157 IEM_MC_ADVANCE_RIP();
6158 IEM_MC_END();
6159 return VINF_SUCCESS;
6160
6161 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6162 }
6163 }
6164}
6165
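/*
 * Illustrative sketch (not compiled, names made up) of the address math in
 * the memory paths above: for bt/bts/btr/btc with a register bit offset the
 * offset is signed and addresses memory in operand-sized units relative to
 * the ModR/M operand, while the low bits pick the bit within that unit.
 * For the 16-bit case (the SAR by 4, SHL by 1 and AND 0x0f above):
 */
#if 0
static void sketchBtEffAddr16(int16_t i16BitOfs, uint64_t *pGCPtrEff, uint16_t *pu16BitMask)
{
    /* Arithmetic shift, like the IEM_MC_SAR op, so negative offsets step backwards. */
    *pGCPtrEff  += (int64_t)(i16BitOfs >> 4) * 2;
    *pu16BitMask = (uint16_t)(1u << (i16BitOfs & 15));
}
#endif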
6166
6167/** Opcode 0x0f 0xa3. */
6168FNIEMOP_DEF(iemOp_bt_Ev_Gv)
6169{
6170 IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
6171 IEMOP_HLP_MIN_386();
6172 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
6173}
6174
6175
6176/**
6177 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
6178 */
6179FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
6180{
6181 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6182 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
6183
6184 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6185 {
6186 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6187 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6188
6189 switch (pVCpu->iem.s.enmEffOpSize)
6190 {
6191 case IEMMODE_16BIT:
6192 IEM_MC_BEGIN(4, 0);
6193 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6194 IEM_MC_ARG(uint16_t, u16Src, 1);
6195 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
6196 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6197
6198 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6199 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6200 IEM_MC_REF_EFLAGS(pEFlags);
6201 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
6202
6203 IEM_MC_ADVANCE_RIP();
6204 IEM_MC_END();
6205 return VINF_SUCCESS;
6206
6207 case IEMMODE_32BIT:
6208 IEM_MC_BEGIN(4, 0);
6209 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6210 IEM_MC_ARG(uint32_t, u32Src, 1);
6211 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
6212 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6213
6214 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6215 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6216 IEM_MC_REF_EFLAGS(pEFlags);
6217 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
6218
6219 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6220 IEM_MC_ADVANCE_RIP();
6221 IEM_MC_END();
6222 return VINF_SUCCESS;
6223
6224 case IEMMODE_64BIT:
6225 IEM_MC_BEGIN(4, 0);
6226 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6227 IEM_MC_ARG(uint64_t, u64Src, 1);
6228 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
6229 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6230
6231 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6232 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6233 IEM_MC_REF_EFLAGS(pEFlags);
6234 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
6235
6236 IEM_MC_ADVANCE_RIP();
6237 IEM_MC_END();
6238 return VINF_SUCCESS;
6239
6240 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6241 }
6242 }
6243 else
6244 {
6245 switch (pVCpu->iem.s.enmEffOpSize)
6246 {
6247 case IEMMODE_16BIT:
6248 IEM_MC_BEGIN(4, 2);
6249 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6250 IEM_MC_ARG(uint16_t, u16Src, 1);
6251 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6252 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6253 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6254
6255 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6256 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6257 IEM_MC_ASSIGN(cShiftArg, cShift);
6258 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6259 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6260 IEM_MC_FETCH_EFLAGS(EFlags);
6261 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6262 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
6263
6264 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6265 IEM_MC_COMMIT_EFLAGS(EFlags);
6266 IEM_MC_ADVANCE_RIP();
6267 IEM_MC_END();
6268 return VINF_SUCCESS;
6269
6270 case IEMMODE_32BIT:
6271 IEM_MC_BEGIN(4, 2);
6272 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6273 IEM_MC_ARG(uint32_t, u32Src, 1);
6274 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6275 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6276 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6277
6278 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6279 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6280 IEM_MC_ASSIGN(cShiftArg, cShift);
6281 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6282 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6283 IEM_MC_FETCH_EFLAGS(EFlags);
6284 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6285 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
6286
6287 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6288 IEM_MC_COMMIT_EFLAGS(EFlags);
6289 IEM_MC_ADVANCE_RIP();
6290 IEM_MC_END();
6291 return VINF_SUCCESS;
6292
6293 case IEMMODE_64BIT:
6294 IEM_MC_BEGIN(4, 2);
6295 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6296 IEM_MC_ARG(uint64_t, u64Src, 1);
6297 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6298 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6299 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6300
6301 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6302 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6303 IEM_MC_ASSIGN(cShiftArg, cShift);
6304 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6305 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6306 IEM_MC_FETCH_EFLAGS(EFlags);
6307 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6308 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
6309
6310 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6311 IEM_MC_COMMIT_EFLAGS(EFlags);
6312 IEM_MC_ADVANCE_RIP();
6313 IEM_MC_END();
6314 return VINF_SUCCESS;
6315
6316 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6317 }
6318 }
6319}
6320
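/*
 * Illustrative sketch (not compiled, names made up) of the double precision
 * shift the workers dispatch to: shld shifts the destination left and fills
 * the vacated low bits from the top of the source; shrd mirrors it.  The CPU
 * masks the count to the operand width first (mod 32 here, mod 64 with
 * REX.W; 16-bit counts above 15 give undefined results).
 */
#if 0
static uint32_t sketchShldU32(uint32_t uDst, uint32_t uSrc, uint8_t cShift)
{
    cShift &= 31;
    if (!cShift)
        return uDst; /* flags are left alone for a zero count */
    return (uDst << cShift) | (uSrc >> (32 - cShift));
}
#endif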
6321
6322/**
6323 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
6324 */
6325FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
6326{
6327 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6328 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
6329
6330 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6331 {
6332 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6333
6334 switch (pVCpu->iem.s.enmEffOpSize)
6335 {
6336 case IEMMODE_16BIT:
6337 IEM_MC_BEGIN(4, 0);
6338 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6339 IEM_MC_ARG(uint16_t, u16Src, 1);
6340 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6341 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6342
6343 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6344 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6345 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6346 IEM_MC_REF_EFLAGS(pEFlags);
6347 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
6348
6349 IEM_MC_ADVANCE_RIP();
6350 IEM_MC_END();
6351 return VINF_SUCCESS;
6352
6353 case IEMMODE_32BIT:
6354 IEM_MC_BEGIN(4, 0);
6355 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6356 IEM_MC_ARG(uint32_t, u32Src, 1);
6357 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6358 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6359
6360 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6361 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6362 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6363 IEM_MC_REF_EFLAGS(pEFlags);
6364 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
6365
6366 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6367 IEM_MC_ADVANCE_RIP();
6368 IEM_MC_END();
6369 return VINF_SUCCESS;
6370
6371 case IEMMODE_64BIT:
6372 IEM_MC_BEGIN(4, 0);
6373 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6374 IEM_MC_ARG(uint64_t, u64Src, 1);
6375 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6376 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6377
6378 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6379 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6380 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6381 IEM_MC_REF_EFLAGS(pEFlags);
6382 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
6383
6384 IEM_MC_ADVANCE_RIP();
6385 IEM_MC_END();
6386 return VINF_SUCCESS;
6387
6388 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6389 }
6390 }
6391 else
6392 {
6393 switch (pVCpu->iem.s.enmEffOpSize)
6394 {
6395 case IEMMODE_16BIT:
6396 IEM_MC_BEGIN(4, 2);
6397 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6398 IEM_MC_ARG(uint16_t, u16Src, 1);
6399 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6400 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6401 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6402
6403 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6404 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6405 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6406 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6407 IEM_MC_FETCH_EFLAGS(EFlags);
6408 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6409 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
6410
6411 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6412 IEM_MC_COMMIT_EFLAGS(EFlags);
6413 IEM_MC_ADVANCE_RIP();
6414 IEM_MC_END();
6415 return VINF_SUCCESS;
6416
6417 case IEMMODE_32BIT:
6418 IEM_MC_BEGIN(4, 2);
6419 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6420 IEM_MC_ARG(uint32_t, u32Src, 1);
6421 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6422 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6423 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6424
6425 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6426 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6427 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6428 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6429 IEM_MC_FETCH_EFLAGS(EFlags);
6430 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6431 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
6432
6433 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6434 IEM_MC_COMMIT_EFLAGS(EFlags);
6435 IEM_MC_ADVANCE_RIP();
6436 IEM_MC_END();
6437 return VINF_SUCCESS;
6438
6439 case IEMMODE_64BIT:
6440 IEM_MC_BEGIN(4, 2);
6441 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6442 IEM_MC_ARG(uint64_t, u64Src, 1);
6443 IEM_MC_ARG(uint8_t, cShiftArg, 2);
6444 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6445 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6446
6447 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6448 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6449 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6450 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6451 IEM_MC_FETCH_EFLAGS(EFlags);
6452 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6453 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
6454
6455 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6456 IEM_MC_COMMIT_EFLAGS(EFlags);
6457 IEM_MC_ADVANCE_RIP();
6458 IEM_MC_END();
6459 return VINF_SUCCESS;
6460
6461 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6462 }
6463 }
6464}
6465
6466
6468/** Opcode 0x0f 0xa4. */
6469FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
6470{
6471 IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
6472 IEMOP_HLP_MIN_386();
6473 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shld);
6474}
6475
6476
6477/** Opcode 0x0f 0xa5. */
6478FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
6479{
6480 IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
6481 IEMOP_HLP_MIN_386();
6482 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shld);
6483}
6484
6485
6486/** Opcode 0x0f 0xa8. */
6487FNIEMOP_DEF(iemOp_push_gs)
6488{
6489 IEMOP_MNEMONIC(push_gs, "push gs");
6490 IEMOP_HLP_MIN_386();
6491 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6492 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
6493}
6494
6495
6496/** Opcode 0x0f 0xa9. */
6497FNIEMOP_DEF(iemOp_pop_gs)
6498{
6499 IEMOP_MNEMONIC(pop_gs, "pop gs");
6500 IEMOP_HLP_MIN_386();
6501 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6502 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
6503}
6504
6505
6506/** Opcode 0x0f 0xaa. */
6507FNIEMOP_DEF(iemOp_rsm)
6508{
6509 IEMOP_MNEMONIC(rsm, "rsm");
6510 IEMOP_HLP_SVM_CTRL_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_RSM, SVM_EXIT_RSM, 0, 0);
6511    /** @todo rsm - for the regular case (above handles only the SVM nested-guest
6512     *        intercept); needs IEMOP_HLP_MIN_386() when implemented. */
6513 IEMOP_BITCH_ABOUT_STUB();
6514 return IEMOP_RAISE_INVALID_OPCODE();
6515}
6516
6519
6520/** Opcode 0x0f 0xab. */
6521FNIEMOP_DEF(iemOp_bts_Ev_Gv)
6522{
6523 IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
6524 IEMOP_HLP_MIN_386();
6525 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
6526}
6527
6528
6529/** Opcode 0x0f 0xac. */
6530FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
6531{
6532 IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
6533 IEMOP_HLP_MIN_386();
6534 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shrd);
6535}
6536
6537
6538/** Opcode 0x0f 0xad. */
6539FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
6540{
6541 IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
6542 IEMOP_HLP_MIN_386();
6543 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shrd);
6544}
6545
6546
6547/** Opcode 0x0f 0xae mem/0. */
6548FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
6549{
6550 IEMOP_MNEMONIC(fxsave, "fxsave m512");
6551 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
6552 return IEMOP_RAISE_INVALID_OPCODE();
6553
6554 IEM_MC_BEGIN(3, 1);
6555 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6556 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6557 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6558 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6559 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6560 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
6561 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6562 IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
6563 IEM_MC_END();
6564 return VINF_SUCCESS;
6565}
6566
6567
6568/** Opcode 0x0f 0xae mem/1. */
6569FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
6570{
6571 IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
6572 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
6573 return IEMOP_RAISE_INVALID_OPCODE();
6574
6575 IEM_MC_BEGIN(3, 1);
6576 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6577 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6578 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6579 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6580 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6581 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6582 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6583 IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
6584 IEM_MC_END();
6585 return VINF_SUCCESS;
6586}
6587
6588
6589/**
6590 * @opmaps grp15
6591 * @opcode !11/2
6592 * @oppfx none
6593 * @opcpuid sse
6594 * @opgroup og_sse_mxcsrsm
6595 * @opxcpttype 5
6596 * @optest op1=0 -> mxcsr=0
6597 * @optest op1=0x2083 -> mxcsr=0x2083
6598 * @optest op1=0xfffffffe -> value.xcpt=0xd
6599 * @optest op1=0x2083 cr0|=ts -> value.xcpt=0x7
6600 * @optest op1=0x2083 cr0|=em -> value.xcpt=0x6
6601 * @optest op1=0x2083 cr0|=mp -> mxcsr=0x2083
6602 * @optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
6603 * @optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
6604 * @optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
6605 * @optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
6606 * @optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
6607 */
6608FNIEMOP_DEF_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm)
6609{
6610 IEMOP_MNEMONIC1(M_MEM, LDMXCSR, ldmxcsr, Md_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6611 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
6612 return IEMOP_RAISE_INVALID_OPCODE();
6613
6614 IEM_MC_BEGIN(2, 0);
6615 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6616 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6617 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6618 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6619    IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6620 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6621 IEM_MC_CALL_CIMPL_2(iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
6622 IEM_MC_END();
6623 return VINF_SUCCESS;
6624}
6625
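/*
 * The op1=0xfffffffe test above documents the reserved bit check: a rough
 * sketch (not compiled; not the actual iemCImpl_ldmxcsr; names and the error
 * status are made up) of what the C implementation must do before committing
 * a new MXCSR value.
 */
#if 0
static int sketchLdMxcsr(uint32_t uNewMxCsr, uint32_t fMxCsrMask /* typically 0xffff */, uint32_t *puGuestMxCsr)
{
    if (uNewMxCsr & ~fMxCsrMask)
        return VERR_SKETCH_RAISE_GP0;   /* reserved bits set -> #GP(0); made-up status code */
    *puGuestMxCsr = uNewMxCsr;
    return VINF_SUCCESS;
}
#endif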
6626
6627/**
6628 * @opmaps grp15
6629 * @opcode !11/3
6630 * @oppfx none
6631 * @opcpuid sse
6632 * @opgroup og_sse_mxcsrsm
6633 * @opxcpttype 5
6634 * @optest mxcsr=0 -> op1=0
6635 * @optest mxcsr=0x2083 -> op1=0x2083
6636 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
6637 * @optest mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
6638 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086
6639 * @optest mxcsr=0x2087 cr4&~=osfxsr -> value.xcpt=0x6
6640 * @optest mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
6641 * @optest mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
6642 * @optest mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
6643 * @optest mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
6644 */
6645FNIEMOP_DEF_1(iemOp_Grp15_stmxcsr, uint8_t, bRm)
6646{
6647 IEMOP_MNEMONIC1(M_MEM, STMXCSR, stmxcsr, Md_WO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6648 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
6649 return IEMOP_RAISE_INVALID_OPCODE();
6650
6651 IEM_MC_BEGIN(2, 0);
6652 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6653 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6654 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6655 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6656 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
6657 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6658 IEM_MC_CALL_CIMPL_2(iemCImpl_stmxcsr, iEffSeg, GCPtrEff);
6659 IEM_MC_END();
6660 return VINF_SUCCESS;
6661}
6662
6663
6664/**
6665 * @opmaps grp15
6666 * @opcode !11/4
6667 * @oppfx none
6668 * @opcpuid xsave
6669 * @opgroup og_system
6670 * @opxcpttype none
6671 */
6672FNIEMOP_DEF_1(iemOp_Grp15_xsave, uint8_t, bRm)
6673{
6674 IEMOP_MNEMONIC1(M_MEM, XSAVE, xsave, M_RW, DISOPTYPE_HARMLESS, 0);
6675 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
6676 return IEMOP_RAISE_INVALID_OPCODE();
6677
6678 IEM_MC_BEGIN(3, 0);
6679 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6680 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6681 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6682 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6683 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6684 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
6685 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6686 IEM_MC_CALL_CIMPL_3(iemCImpl_xsave, iEffSeg, GCPtrEff, enmEffOpSize);
6687 IEM_MC_END();
6688 return VINF_SUCCESS;
6689}
6690
6691
6692/**
6693 * @opmaps grp15
6694 * @opcode !11/5
6695 * @oppfx none
6696 * @opcpuid xsave
6697 * @opgroup og_system
6698 * @opxcpttype none
6699 */
6700FNIEMOP_DEF_1(iemOp_Grp15_xrstor, uint8_t, bRm)
6701{
6702 IEMOP_MNEMONIC1(M_MEM, XRSTOR, xrstor, M_RO, DISOPTYPE_HARMLESS, 0);
6703 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
6704 return IEMOP_RAISE_INVALID_OPCODE();
6705
6706 IEM_MC_BEGIN(3, 0);
6707 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6708 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6709 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
6710 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6711 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6712    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6713 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6714 IEM_MC_CALL_CIMPL_3(iemCImpl_xrstor, iEffSeg, GCPtrEff, enmEffOpSize);
6715 IEM_MC_END();
6716 return VINF_SUCCESS;
6717}
6718
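/*
 * Both xsave and xrstor require a 64-byte aligned memory operand; an
 * illustrative sketch (not compiled, name made up) of the alignment check
 * the C implementations start out with:
 */
#if 0
static bool sketchXsaveAreaAligned(uint64_t GCPtrEff)
{
    return (GCPtrEff & 63) == 0; /* misaligned -> #GP(0) */
}
#endif
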
6719/** Opcode 0x0f 0xae mem/6. */
6720FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);
6721
6722/**
6723 * @opmaps grp15
6724 * @opcode !11/7
6725 * @oppfx none
6726 * @opcpuid clfsh
6727 * @opgroup og_cachectl
6728 * @optest op1=1 ->
6729 */
6730FNIEMOP_DEF_1(iemOp_Grp15_clflush, uint8_t, bRm)
6731{
6732 IEMOP_MNEMONIC1(M_MEM, CLFLUSH, clflush, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6733 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlush)
6734 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
6735
6736 IEM_MC_BEGIN(2, 0);
6737 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6738 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6739 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6740 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6741 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6742 IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
6743 IEM_MC_END();
6744 return VINF_SUCCESS;
6745}
6746
6747/**
6748 * @opmaps grp15
6749 * @opcode !11/7
6750 * @oppfx 0x66
6751 * @opcpuid clflushopt
6752 * @opgroup og_cachectl
6753 * @optest op1=1 ->
6754 */
6755FNIEMOP_DEF_1(iemOp_Grp15_clflushopt, uint8_t, bRm)
6756{
6757 IEMOP_MNEMONIC1(M_MEM, CLFLUSHOPT, clflushopt, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6758 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlushOpt)
6759 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
6760
6761 IEM_MC_BEGIN(2, 0);
6762 IEM_MC_ARG(uint8_t, iEffSeg, 0);
6763 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
6764 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
6765 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6766 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
6767 IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
6768 IEM_MC_END();
6769 return VINF_SUCCESS;
6770}
6771
6772
6773/** Opcode 0x0f 0xae 11b/5. */
6774FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
6775{
6776 RT_NOREF_PV(bRm);
6777 IEMOP_MNEMONIC(lfence, "lfence");
6778 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6779 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6780 return IEMOP_RAISE_INVALID_OPCODE();
6781
6782 IEM_MC_BEGIN(0, 0);
6783 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
6784 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
6785 else
6786 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
6787 IEM_MC_ADVANCE_RIP();
6788 IEM_MC_END();
6789 return VINF_SUCCESS;
6790}
6791
6792
6793/** Opcode 0x0f 0xae 11b/6. */
6794FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
6795{
6796 RT_NOREF_PV(bRm);
6797 IEMOP_MNEMONIC(mfence, "mfence");
6798 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6799 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6800 return IEMOP_RAISE_INVALID_OPCODE();
6801
6802 IEM_MC_BEGIN(0, 0);
6803 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
6804 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
6805 else
6806 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
6807 IEM_MC_ADVANCE_RIP();
6808 IEM_MC_END();
6809 return VINF_SUCCESS;
6810}
6811
6812
6813/** Opcode 0x0f 0xae 11b/7. */
6814FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
6815{
6816 RT_NOREF_PV(bRm);
6817 IEMOP_MNEMONIC(sfence, "sfence");
6818 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6819 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6820 return IEMOP_RAISE_INVALID_OPCODE();
6821
6822 IEM_MC_BEGIN(0, 0);
6823 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
6824 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
6825 else
6826 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
6827 IEM_MC_ADVANCE_RIP();
6828 IEM_MC_END();
6829 return VINF_SUCCESS;
6830}
6831
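/*
 * On hosts without SSE2 the three fences above fall back to
 * iemAImpl_alt_mem_fence, which lives in assembly; a lock-prefixed
 * read-modify-write is the classic full barrier on such CPUs.  Illustrative
 * C11 sketch (not compiled, name made up) of the idea:
 */
#if 0
# include <stdatomic.h>
static void sketchAltMemFence(void)
{
    static _Atomic uint32_t s_u32Dummy;
    atomic_fetch_add(&s_u32Dummy, 0); /* locked RMW acts as a full fence on x86 */
}
#endif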
6832
6833/** Opcode 0xf3 0x0f 0xae 11b/0. */
6834FNIEMOP_UD_STUB_1(iemOp_Grp15_rdfsbase, uint8_t, bRm);
6835
6836/** Opcode 0xf3 0x0f 0xae 11b/1. */
6837FNIEMOP_UD_STUB_1(iemOp_Grp15_rdgsbase, uint8_t, bRm);
6838
6839/** Opcode 0xf3 0x0f 0xae 11b/2. */
6840FNIEMOP_UD_STUB_1(iemOp_Grp15_wrfsbase, uint8_t, bRm);
6841
6842/** Opcode 0xf3 0x0f 0xae 11b/3. */
6843FNIEMOP_UD_STUB_1(iemOp_Grp15_wrgsbase, uint8_t, bRm);
6844
6845
6846/**
6847 * Group 15 jump table for register variant.
6848 */
6849IEM_STATIC const PFNIEMOPRM g_apfnGroup15RegReg[] =
6850{ /* pfx: none, 066h, 0f3h, 0f2h */
6851 /* /0 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdfsbase, iemOp_InvalidWithRM,
6852 /* /1 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdgsbase, iemOp_InvalidWithRM,
6853 /* /2 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrfsbase, iemOp_InvalidWithRM,
6854 /* /3 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrgsbase, iemOp_InvalidWithRM,
6855 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
6856 /* /5 */ iemOp_Grp15_lfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6857 /* /6 */ iemOp_Grp15_mfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6858 /* /7 */ iemOp_Grp15_sfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6859};
6860AssertCompile(RT_ELEMENTS(g_apfnGroup15RegReg) == 8*4);
6861
6862
6863/**
6864 * Group 15 jump table for memory variant.
6865 */
6866IEM_STATIC const PFNIEMOPRM g_apfnGroup15MemReg[] =
6867{ /* pfx: none, 066h, 0f3h, 0f2h */
6868 /* /0 */ iemOp_Grp15_fxsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6869 /* /1 */ iemOp_Grp15_fxrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6870 /* /2 */ iemOp_Grp15_ldmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6871 /* /3 */ iemOp_Grp15_stmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6872 /* /4 */ iemOp_Grp15_xsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6873 /* /5 */ iemOp_Grp15_xrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6874 /* /6 */ iemOp_Grp15_xsaveopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6875 /* /7 */ iemOp_Grp15_clflush, iemOp_Grp15_clflushopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
6876};
6877AssertCompile(RT_ELEMENTS(g_apfnGroup15MemReg) == 8*4);
6878
6879
6880/** Opcode 0x0f 0xae. */
6881FNIEMOP_DEF(iemOp_Grp15)
6882{
6883 IEMOP_HLP_MIN_586(); /* Not entirely accurate nor needed, but useful for debugging 286 code. */
6884 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6885 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6886 /* register, register */
6887 return FNIEMOP_CALL_1(g_apfnGroup15RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
6888 + pVCpu->iem.s.idxPrefix], bRm);
6889 /* memory, register */
6890 return FNIEMOP_CALL_1(g_apfnGroup15MemReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
6891 + pVCpu->iem.s.idxPrefix], bRm);
6892}
6893
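/*
 * The two tables above have four operand-prefix columns per /r row, so the
 * dispatcher above indexes them as reg * 4 + prefix.  Illustrative sketch
 * (not compiled, name made up):
 */
#if 0
static unsigned sketchGrp15Index(uint8_t bRm, uint8_t idxPrefix /* 0=none, 1=66h, 2=f3h, 3=f2h */)
{
    return ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4 + idxPrefix;
}
#endif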
6894
6895/** Opcode 0x0f 0xaf. */
6896FNIEMOP_DEF(iemOp_imul_Gv_Ev)
6897{
6898 IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
6899 IEMOP_HLP_MIN_386();
6900 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
6901 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_imul_two);
6902}
6903
6904
6905/** Opcode 0x0f 0xb0. */
6906FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
6907{
6908 IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
6909 IEMOP_HLP_MIN_486();
6910 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6911
6912 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6913 {
6914 IEMOP_HLP_DONE_DECODING();
6915 IEM_MC_BEGIN(4, 0);
6916 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6917 IEM_MC_ARG(uint8_t *, pu8Al, 1);
6918 IEM_MC_ARG(uint8_t, u8Src, 2);
6919 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6920
6921 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6922 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6923 IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
6924 IEM_MC_REF_EFLAGS(pEFlags);
6925 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6926 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
6927 else
6928 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
6929
6930 IEM_MC_ADVANCE_RIP();
6931 IEM_MC_END();
6932 }
6933 else
6934 {
6935 IEM_MC_BEGIN(4, 3);
6936 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6937 IEM_MC_ARG(uint8_t *, pu8Al, 1);
6938 IEM_MC_ARG(uint8_t, u8Src, 2);
6939 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
6940 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6941 IEM_MC_LOCAL(uint8_t, u8Al);
6942
6943 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6944 IEMOP_HLP_DONE_DECODING();
6945 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6946 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6947 IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
6948 IEM_MC_FETCH_EFLAGS(EFlags);
6949 IEM_MC_REF_LOCAL(pu8Al, u8Al);
6950 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6951 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
6952 else
6953 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
6954
6955 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
6956 IEM_MC_COMMIT_EFLAGS(EFlags);
6957 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al);
6958 IEM_MC_ADVANCE_RIP();
6959 IEM_MC_END();
6960 }
6961 return VINF_SUCCESS;
6962}
6963
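/*
 * Illustrative sketch (not compiled, names made up) of the cmpxchg primitive
 * the code above maps onto.  On failure the accumulator is loaded from the
 * destination, which is why the memory variants commit both the mapping and
 * the local xAX copy.  Only ZF is shown; the other arithmetic flags follow
 * the implied compare.
 */
#if 0
static void sketchCmpXchgU8(uint8_t *pu8Dst, uint8_t *pu8Al, uint8_t u8Src, uint32_t *pfEFlags)
{
    if (*pu8Dst == *pu8Al)
    {
        *pu8Dst    = u8Src;
        *pfEFlags |= X86_EFL_ZF;
    }
    else
    {
        *pu8Al     = *pu8Dst;
        *pfEFlags &= ~X86_EFL_ZF;
    }
}
#endif
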
6964/** Opcode 0x0f 0xb1. */
6965FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
6966{
6967 IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
6968 IEMOP_HLP_MIN_486();
6969 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6970
6971 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6972 {
6973 IEMOP_HLP_DONE_DECODING();
6974 switch (pVCpu->iem.s.enmEffOpSize)
6975 {
6976 case IEMMODE_16BIT:
6977 IEM_MC_BEGIN(4, 0);
6978 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6979 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
6980 IEM_MC_ARG(uint16_t, u16Src, 2);
6981 IEM_MC_ARG(uint32_t *, pEFlags, 3);
6982
6983 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6984 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6985 IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
6986 IEM_MC_REF_EFLAGS(pEFlags);
6987 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6988 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
6989 else
6990 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
6991
6992 IEM_MC_ADVANCE_RIP();
6993 IEM_MC_END();
6994 return VINF_SUCCESS;
6995
6996 case IEMMODE_32BIT:
6997 IEM_MC_BEGIN(4, 0);
6998 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6999 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
7000 IEM_MC_ARG(uint32_t, u32Src, 2);
7001 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7002
7003 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7004 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7005 IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
7006 IEM_MC_REF_EFLAGS(pEFlags);
7007 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7008 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
7009 else
7010 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
7011
7012 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
7013 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7014 IEM_MC_ADVANCE_RIP();
7015 IEM_MC_END();
7016 return VINF_SUCCESS;
7017
7018 case IEMMODE_64BIT:
7019 IEM_MC_BEGIN(4, 0);
7020 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7021 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
7022#ifdef RT_ARCH_X86
7023 IEM_MC_ARG(uint64_t *, pu64Src, 2);
7024#else
7025 IEM_MC_ARG(uint64_t, u64Src, 2);
7026#endif
7027 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7028
7029 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7030 IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
7031 IEM_MC_REF_EFLAGS(pEFlags);
7032#ifdef RT_ARCH_X86
7033 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7034 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7035 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
7036 else
7037 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
7038#else
7039 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7040 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7041 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
7042 else
7043 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
7044#endif
7045
7046 IEM_MC_ADVANCE_RIP();
7047 IEM_MC_END();
7048 return VINF_SUCCESS;
7049
7050 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7051 }
7052 }
7053 else
7054 {
7055 switch (pVCpu->iem.s.enmEffOpSize)
7056 {
7057 case IEMMODE_16BIT:
7058 IEM_MC_BEGIN(4, 3);
7059 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7060 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
7061 IEM_MC_ARG(uint16_t, u16Src, 2);
7062 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7063 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7064 IEM_MC_LOCAL(uint16_t, u16Ax);
7065
7066 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7067 IEMOP_HLP_DONE_DECODING();
7068 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7069 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7070 IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
7071 IEM_MC_FETCH_EFLAGS(EFlags);
7072 IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
7073 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7074 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
7075 else
7076 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
7077
7078 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
7079 IEM_MC_COMMIT_EFLAGS(EFlags);
7080 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
7081 IEM_MC_ADVANCE_RIP();
7082 IEM_MC_END();
7083 return VINF_SUCCESS;
7084
7085 case IEMMODE_32BIT:
7086 IEM_MC_BEGIN(4, 3);
7087 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7088 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
7089 IEM_MC_ARG(uint32_t, u32Src, 2);
7090 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7091 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7092 IEM_MC_LOCAL(uint32_t, u32Eax);
7093
7094 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7095 IEMOP_HLP_DONE_DECODING();
7096 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7097 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7098 IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
7099 IEM_MC_FETCH_EFLAGS(EFlags);
7100 IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
7101 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7102 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
7103 else
7104 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
7105
7106 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
7107 IEM_MC_COMMIT_EFLAGS(EFlags);
7108 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
7109 IEM_MC_ADVANCE_RIP();
7110 IEM_MC_END();
7111 return VINF_SUCCESS;
7112
7113 case IEMMODE_64BIT:
7114 IEM_MC_BEGIN(4, 3);
7115 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7116 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
7117#ifdef RT_ARCH_X86
7118 IEM_MC_ARG(uint64_t *, pu64Src, 2);
7119#else
7120 IEM_MC_ARG(uint64_t, u64Src, 2);
7121#endif
7122 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7123 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7124 IEM_MC_LOCAL(uint64_t, u64Rax);
7125
7126 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7127 IEMOP_HLP_DONE_DECODING();
7128 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7129 IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
7130 IEM_MC_FETCH_EFLAGS(EFlags);
7131 IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
7132#ifdef RT_ARCH_X86
7133 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7134 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7135 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
7136 else
7137 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
7138#else
7139 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7140 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7141 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
7142 else
7143 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
7144#endif
7145
7146 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
7147 IEM_MC_COMMIT_EFLAGS(EFlags);
7148 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
7149 IEM_MC_ADVANCE_RIP();
7150 IEM_MC_END();
7151 return VINF_SUCCESS;
7152
7153 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7154 }
7155 }
7156}
7157
7158
7159FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
7160{
7161 Assert((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)); /* Caller checks this */
7162 uint8_t const iGReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
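 /* Far pointer load helper (lss/lfs/lgs): the memory operand holds the
    offset first (2/4/8 bytes, by operand size) followed by the 16-bit
    selector, fetched at displacement 2/4/8 in the cases below. */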
7163
7164 switch (pVCpu->iem.s.enmEffOpSize)
7165 {
7166 case IEMMODE_16BIT:
7167 IEM_MC_BEGIN(5, 1);
7168 IEM_MC_ARG(uint16_t, uSel, 0);
7169 IEM_MC_ARG(uint16_t, offSeg, 1);
7170 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
7171 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
7172 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
7173 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
7174 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7175 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7176 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
7177 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 2);
7178 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
7179 IEM_MC_END();
7180 return VINF_SUCCESS;
7181
7182 case IEMMODE_32BIT:
7183 IEM_MC_BEGIN(5, 1);
7184 IEM_MC_ARG(uint16_t, uSel, 0);
7185 IEM_MC_ARG(uint32_t, offSeg, 1);
7186 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
7187 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
7188 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
7189 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
7190 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7191 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7192 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
7193 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 4);
7194 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
7195 IEM_MC_END();
7196 return VINF_SUCCESS;
7197
7198 case IEMMODE_64BIT:
7199 IEM_MC_BEGIN(5, 1);
7200 IEM_MC_ARG(uint16_t, uSel, 0);
7201 IEM_MC_ARG(uint64_t, offSeg, 1);
7202 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
7203 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
7204 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
7205 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
7206 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7207 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7208 if (IEM_IS_GUEST_CPU_AMD(pVCpu)) /** @todo testcase: rev 3.15 of the AMD manual claims it only loads a 32-bit greg. */
7209 IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
7210 else
7211 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
7212 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 8);
7213 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
7214 IEM_MC_END();
7215 return VINF_SUCCESS;
7216
7217 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7218 }
7219}
7220
7221
7222/** Opcode 0x0f 0xb2. */
7223FNIEMOP_DEF(iemOp_lss_Gv_Mp)
7224{
7225 IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
7226 IEMOP_HLP_MIN_386();
7227 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7228 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7229 return IEMOP_RAISE_INVALID_OPCODE();
7230 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
7231}
7232
7233
7234/** Opcode 0x0f 0xb3. */
7235FNIEMOP_DEF(iemOp_btr_Ev_Gv)
7236{
7237 IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
7238 IEMOP_HLP_MIN_386();
7239 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
7240}
7241
7242
7243/** Opcode 0x0f 0xb4. */
7244FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
7245{
7246 IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
7247 IEMOP_HLP_MIN_386();
7248 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7249 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7250 return IEMOP_RAISE_INVALID_OPCODE();
7251 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
7252}
7253
7254
7255/** Opcode 0x0f 0xb5. */
7256FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
7257{
7258 IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
7259 IEMOP_HLP_MIN_386();
7260 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7261 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7262 return IEMOP_RAISE_INVALID_OPCODE();
7263 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
7264}
7265
7266
7267/** Opcode 0x0f 0xb6. */
7268FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
7269{
7270 IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
7271 IEMOP_HLP_MIN_386();
7272
7273 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7274
7275 /*
7276 * If rm is denoting a register, no more instruction bytes.
7277 */
7278 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7279 {
7280 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7281 switch (pVCpu->iem.s.enmEffOpSize)
7282 {
7283 case IEMMODE_16BIT:
7284 IEM_MC_BEGIN(0, 1);
7285 IEM_MC_LOCAL(uint16_t, u16Value);
7286 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7287 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
7288 IEM_MC_ADVANCE_RIP();
7289 IEM_MC_END();
7290 return VINF_SUCCESS;
7291
7292 case IEMMODE_32BIT:
7293 IEM_MC_BEGIN(0, 1);
7294 IEM_MC_LOCAL(uint32_t, u32Value);
7295 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7296 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7297 IEM_MC_ADVANCE_RIP();
7298 IEM_MC_END();
7299 return VINF_SUCCESS;
7300
7301 case IEMMODE_64BIT:
7302 IEM_MC_BEGIN(0, 1);
7303 IEM_MC_LOCAL(uint64_t, u64Value);
7304 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7305 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7306 IEM_MC_ADVANCE_RIP();
7307 IEM_MC_END();
7308 return VINF_SUCCESS;
7309
7310 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7311 }
7312 }
7313 else
7314 {
7315 /*
7316 * We're loading a register from memory.
7317 */
7318 switch (pVCpu->iem.s.enmEffOpSize)
7319 {
7320 case IEMMODE_16BIT:
7321 IEM_MC_BEGIN(0, 2);
7322 IEM_MC_LOCAL(uint16_t, u16Value);
7323 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7324 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7325 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7326 IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7327 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
7328 IEM_MC_ADVANCE_RIP();
7329 IEM_MC_END();
7330 return VINF_SUCCESS;
7331
7332 case IEMMODE_32BIT:
7333 IEM_MC_BEGIN(0, 2);
7334 IEM_MC_LOCAL(uint32_t, u32Value);
7335 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7336 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7337 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7338 IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7339 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7340 IEM_MC_ADVANCE_RIP();
7341 IEM_MC_END();
7342 return VINF_SUCCESS;
7343
7344 case IEMMODE_64BIT:
7345 IEM_MC_BEGIN(0, 2);
7346 IEM_MC_LOCAL(uint64_t, u64Value);
7347 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7348 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7349 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7350 IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7351 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7352 IEM_MC_ADVANCE_RIP();
7353 IEM_MC_END();
7354 return VINF_SUCCESS;
7355
7356 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7357 }
7358 }
7359}
7360
7361
7362/** Opcode 0x0f 0xb7. */
7363FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
7364{
7365 IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
7366 IEMOP_HLP_MIN_386();
7367
7368 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7369
7370 /** @todo Not entirely sure how the operand size prefix is handled here,
7371 * assuming that it will be ignored. Would be nice to have a few
7372 * tests for this. */
7373 /*
7374 * If rm is denoting a register, no more instruction bytes.
7375 */
7376 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7377 {
7378 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7379 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
7380 {
7381 IEM_MC_BEGIN(0, 1);
7382 IEM_MC_LOCAL(uint32_t, u32Value);
7383 IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7384 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7385 IEM_MC_ADVANCE_RIP();
7386 IEM_MC_END();
7387 }
7388 else
7389 {
7390 IEM_MC_BEGIN(0, 1);
7391 IEM_MC_LOCAL(uint64_t, u64Value);
7392 IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7393 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7394 IEM_MC_ADVANCE_RIP();
7395 IEM_MC_END();
7396 }
7397 }
7398 else
7399 {
7400 /*
7401 * We're loading a register from memory.
7402 */
7403 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
7404 {
7405 IEM_MC_BEGIN(0, 2);
7406 IEM_MC_LOCAL(uint32_t, u32Value);
7407 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7408 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7409 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7410 IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7411 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7412 IEM_MC_ADVANCE_RIP();
7413 IEM_MC_END();
7414 }
7415 else
7416 {
7417 IEM_MC_BEGIN(0, 2);
7418 IEM_MC_LOCAL(uint64_t, u64Value);
7419 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7420 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7421 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7422 IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7423 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7424 IEM_MC_ADVANCE_RIP();
7425 IEM_MC_END();
7426 }
7427 }
7428 return VINF_SUCCESS;
7429}
7430
7431
7432/** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF) */
7433FNIEMOP_UD_STUB(iemOp_jmpe);
7434/** Opcode 0xf3 0x0f 0xb8 - POPCNT Gv, Ev */
7435FNIEMOP_STUB(iemOp_popcnt_Gv_Ev);
7436
7437
7438/**
7439 * @opcode 0xb9
7440 * @opinvalid intel-modrm
7441 * @optest ->
7442 */
7443FNIEMOP_DEF(iemOp_Grp10)
7444{
7445 /*
7446 * AMD does not decode beyond the 0xb9 opcode byte, whereas Intel decodes the
7447 * ModR/M byte too. See bs3-cpu-decoder-1.c32. So, we can forward to iemOp_InvalidNeedRM.
7448 */
7449 Log(("iemOp_Grp10 aka UD1 -> #UD\n"));
7450 IEMOP_MNEMONIC2EX(ud1, "ud1", RM, UD1, ud1, Gb, Eb, DISOPTYPE_INVALID, IEMOPHINT_IGNORES_OP_SIZES); /* just picked Gb,Eb here. */
7451 return FNIEMOP_CALL(iemOp_InvalidNeedRM);
7452}
7453
7454
7455/** Opcode 0x0f 0xba. */
7456FNIEMOP_DEF(iemOp_Grp8)
7457{
7458 IEMOP_HLP_MIN_386();
7459 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7460 PCIEMOPBINSIZES pImpl;
7461 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
7462 {
7463 case 0: case 1: case 2: case 3:
7464 /* Both AMD and Intel want full modr/m decoding and imm8. */
7465 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeedImm8, bRm);
7466 case 4: pImpl = &g_iemAImpl_bt; IEMOP_MNEMONIC(bt_Ev_Ib, "bt Ev,Ib"); break;
7467 case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib"); break;
7468 case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib"); break;
7469 case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib"); break;
7470 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7471 }
7472 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
7473
7474 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7475 {
7476 /* register destination. */
7477 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
7478 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
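 /* The immediate bit offset wraps modulo the operand width (the u8Bit
    masking below), so e.g. 'bt ax, 17' ends up testing bit 1. */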
7479
7480 switch (pVCpu->iem.s.enmEffOpSize)
7481 {
7482 case IEMMODE_16BIT:
7483 IEM_MC_BEGIN(3, 0);
7484 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7485 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u8Bit & 0x0f, 1);
7486 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7487
7488 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7489 IEM_MC_REF_EFLAGS(pEFlags);
7490 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
7491
7492 IEM_MC_ADVANCE_RIP();
7493 IEM_MC_END();
7494 return VINF_SUCCESS;
7495
7496 case IEMMODE_32BIT:
7497 IEM_MC_BEGIN(3, 0);
7498 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7499 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u8Bit & 0x1f, 1);
7500 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7501
7502 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7503 IEM_MC_REF_EFLAGS(pEFlags);
7504 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
7505
7506 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7507 IEM_MC_ADVANCE_RIP();
7508 IEM_MC_END();
7509 return VINF_SUCCESS;
7510
7511 case IEMMODE_64BIT:
7512 IEM_MC_BEGIN(3, 0);
7513 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7514 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u8Bit & 0x3f, 1);
7515 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7516
7517 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7518 IEM_MC_REF_EFLAGS(pEFlags);
7519 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
7520
7521 IEM_MC_ADVANCE_RIP();
7522 IEM_MC_END();
7523 return VINF_SUCCESS;
7524
7525 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7526 }
7527 }
7528 else
7529 {
7530 /* memory destination. */
7531
7532 uint32_t fAccess;
7533 if (pImpl->pfnLockedU16)
7534 fAccess = IEM_ACCESS_DATA_RW;
7535 else /* BT */
7536 fAccess = IEM_ACCESS_DATA_R;
7537
7538 /** @todo test negative bit offsets! */
7539 switch (pVCpu->iem.s.enmEffOpSize)
7540 {
7541 case IEMMODE_16BIT:
7542 IEM_MC_BEGIN(3, 1);
7543 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7544 IEM_MC_ARG(uint16_t, u16Src, 1);
7545 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
7546 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7547
7548 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7549 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
7550 IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
7551 if (pImpl->pfnLockedU16)
7552 IEMOP_HLP_DONE_DECODING();
7553 else
7554 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7555 IEM_MC_FETCH_EFLAGS(EFlags);
7556 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7557 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7558 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
7559 else
7560 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
7561 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
7562
7563 IEM_MC_COMMIT_EFLAGS(EFlags);
7564 IEM_MC_ADVANCE_RIP();
7565 IEM_MC_END();
7566 return VINF_SUCCESS;
7567
7568 case IEMMODE_32BIT:
7569 IEM_MC_BEGIN(3, 1);
7570 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7571 IEM_MC_ARG(uint32_t, u32Src, 1);
7572 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
7573 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7574
7575 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7576 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
7577 IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
7578 if (pImpl->pfnLockedU16)
7579 IEMOP_HLP_DONE_DECODING();
7580 else
7581 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7582 IEM_MC_FETCH_EFLAGS(EFlags);
7583 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7584 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7585 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
7586 else
7587 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
7588 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
7589
7590 IEM_MC_COMMIT_EFLAGS(EFlags);
7591 IEM_MC_ADVANCE_RIP();
7592 IEM_MC_END();
7593 return VINF_SUCCESS;
7594
7595 case IEMMODE_64BIT:
7596 IEM_MC_BEGIN(3, 1);
7597 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7598 IEM_MC_ARG(uint64_t, u64Src, 1);
7599 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
7600 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7601
7602 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7603 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
7604 IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
7605 if (pImpl->pfnLockedU16)
7606 IEMOP_HLP_DONE_DECODING();
7607 else
7608 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7609 IEM_MC_FETCH_EFLAGS(EFlags);
7610 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7611 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7612 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
7613 else
7614 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
7615 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
7616
7617 IEM_MC_COMMIT_EFLAGS(EFlags);
7618 IEM_MC_ADVANCE_RIP();
7619 IEM_MC_END();
7620 return VINF_SUCCESS;
7621
7622 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7623 }
7624 }
7625}
7626
7627
7628/** Opcode 0x0f 0xbb. */
7629FNIEMOP_DEF(iemOp_btc_Ev_Gv)
7630{
7631 IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
7632 IEMOP_HLP_MIN_386();
7633 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
7634}
7635
7636
7637/** Opcode 0x0f 0xbc. */
7638FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
7639{
7640 IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
7641 IEMOP_HLP_MIN_386();
7642 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
7643 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsf);
7644}
7645
7646
7647/** Opcode 0xf3 0x0f 0xbc - TZCNT Gv, Ev */
7648FNIEMOP_STUB(iemOp_tzcnt_Gv_Ev);
7649
7650
7651/** Opcode 0x0f 0xbd. */
7652FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
7653{
7654 IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
7655 IEMOP_HLP_MIN_386();
7656 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
7657 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsr);
7658}
7659
7660
7661/** Opcode 0xf3 0x0f 0xbd - LZCNT Gv, Ev */
7662FNIEMOP_STUB(iemOp_lzcnt_Gv_Ev);
7663
7664
7665/** Opcode 0x0f 0xbe. */
7666FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
7667{
7668 IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
7669 IEMOP_HLP_MIN_386();
7670
7671 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7672
7673 /*
7674 * If rm is denoting a register, no more instruction bytes.
7675 */
7676 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7677 {
7678 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7679 switch (pVCpu->iem.s.enmEffOpSize)
7680 {
7681 case IEMMODE_16BIT:
7682 IEM_MC_BEGIN(0, 1);
7683 IEM_MC_LOCAL(uint16_t, u16Value);
7684 IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7685 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
7686 IEM_MC_ADVANCE_RIP();
7687 IEM_MC_END();
7688 return VINF_SUCCESS;
7689
7690 case IEMMODE_32BIT:
7691 IEM_MC_BEGIN(0, 1);
7692 IEM_MC_LOCAL(uint32_t, u32Value);
7693 IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7694 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7695 IEM_MC_ADVANCE_RIP();
7696 IEM_MC_END();
7697 return VINF_SUCCESS;
7698
7699 case IEMMODE_64BIT:
7700 IEM_MC_BEGIN(0, 1);
7701 IEM_MC_LOCAL(uint64_t, u64Value);
7702 IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7703 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7704 IEM_MC_ADVANCE_RIP();
7705 IEM_MC_END();
7706 return VINF_SUCCESS;
7707
7708 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7709 }
7710 }
7711 else
7712 {
7713 /*
7714 * We're loading a register from memory.
7715 */
7716 switch (pVCpu->iem.s.enmEffOpSize)
7717 {
7718 case IEMMODE_16BIT:
7719 IEM_MC_BEGIN(0, 2);
7720 IEM_MC_LOCAL(uint16_t, u16Value);
7721 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7722 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7723 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7724 IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7725 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
7726 IEM_MC_ADVANCE_RIP();
7727 IEM_MC_END();
7728 return VINF_SUCCESS;
7729
7730 case IEMMODE_32BIT:
7731 IEM_MC_BEGIN(0, 2);
7732 IEM_MC_LOCAL(uint32_t, u32Value);
7733 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7734 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7735 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7736 IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7737 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7738 IEM_MC_ADVANCE_RIP();
7739 IEM_MC_END();
7740 return VINF_SUCCESS;
7741
7742 case IEMMODE_64BIT:
7743 IEM_MC_BEGIN(0, 2);
7744 IEM_MC_LOCAL(uint64_t, u64Value);
7745 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7746 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7747 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7748 IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7749 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7750 IEM_MC_ADVANCE_RIP();
7751 IEM_MC_END();
7752 return VINF_SUCCESS;
7753
7754 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7755 }
7756 }
7757}
7758
7759
7760/** Opcode 0x0f 0xbf. */
7761FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
7762{
7763 IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
7764 IEMOP_HLP_MIN_386();
7765
7766 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7767
7768 /** @todo Not entirely sure how the operand size prefix is handled here,
7769 * assuming that it will be ignored. Would be nice to have a few
7770 * tests for this. */
7771 /*
7772 * If rm is denoting a register, no more instruction bytes.
7773 */
7774 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7775 {
7776 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7777 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
7778 {
7779 IEM_MC_BEGIN(0, 1);
7780 IEM_MC_LOCAL(uint32_t, u32Value);
7781 IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7782 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7783 IEM_MC_ADVANCE_RIP();
7784 IEM_MC_END();
7785 }
7786 else
7787 {
7788 IEM_MC_BEGIN(0, 1);
7789 IEM_MC_LOCAL(uint64_t, u64Value);
7790 IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7791 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7792 IEM_MC_ADVANCE_RIP();
7793 IEM_MC_END();
7794 }
7795 }
7796 else
7797 {
7798 /*
7799 * We're loading a register from memory.
7800 */
7801 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
7802 {
7803 IEM_MC_BEGIN(0, 2);
7804 IEM_MC_LOCAL(uint32_t, u32Value);
7805 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7806 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7807 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7808 IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7809 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
7810 IEM_MC_ADVANCE_RIP();
7811 IEM_MC_END();
7812 }
7813 else
7814 {
7815 IEM_MC_BEGIN(0, 2);
7816 IEM_MC_LOCAL(uint64_t, u64Value);
7817 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7818 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7819 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7820 IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7821 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
7822 IEM_MC_ADVANCE_RIP();
7823 IEM_MC_END();
7824 }
7825 }
7826 return VINF_SUCCESS;
7827}
7828
7829
7830/** Opcode 0x0f 0xc0. */
7831FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
7832{
7833 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7834 IEMOP_HLP_MIN_486();
7835 IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");
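 /* xadd exchanges and adds: the destination receives dest + src, while the
    source register receives the original destination value. */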
7836
7837 /*
7838 * If rm is denoting a register, no more instruction bytes.
7839 */
7840 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7841 {
7842 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7843
7844 IEM_MC_BEGIN(3, 0);
7845 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7846 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
7847 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7848
7849 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7850 IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7851 IEM_MC_REF_EFLAGS(pEFlags);
7852 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
7853
7854 IEM_MC_ADVANCE_RIP();
7855 IEM_MC_END();
7856 }
7857 else
7858 {
7859 /*
7860 * We're accessing memory.
7861 */
7862 IEM_MC_BEGIN(3, 3);
7863 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7864 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
7865 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7866 IEM_MC_LOCAL(uint8_t, u8RegCopy);
7867 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7868
7869 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7870 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7871 IEM_MC_FETCH_GREG_U8(u8RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7872 IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
7873 IEM_MC_FETCH_EFLAGS(EFlags);
7874 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7875 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
7876 else
7877 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);
7878
7879 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
7880 IEM_MC_COMMIT_EFLAGS(EFlags);
7881 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8RegCopy);
7882 IEM_MC_ADVANCE_RIP();
7883 IEM_MC_END();
7884 return VINF_SUCCESS;
7885 }
7886 return VINF_SUCCESS;
7887}
7888
7889
7890/** Opcode 0x0f 0xc1. */
7891FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
7892{
7893 IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
7894 IEMOP_HLP_MIN_486();
7895 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7896
7897 /*
7898 * If rm is denoting a register, no more instruction bytes.
7899 */
7900 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7901 {
7902 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7903
7904 switch (pVCpu->iem.s.enmEffOpSize)
7905 {
7906 case IEMMODE_16BIT:
7907 IEM_MC_BEGIN(3, 0);
7908 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7909 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
7910 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7911
7912 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7913 IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7914 IEM_MC_REF_EFLAGS(pEFlags);
7915 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
7916
7917 IEM_MC_ADVANCE_RIP();
7918 IEM_MC_END();
7919 return VINF_SUCCESS;
7920
7921 case IEMMODE_32BIT:
7922 IEM_MC_BEGIN(3, 0);
7923 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7924 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
7925 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7926
7927 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7928 IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7929 IEM_MC_REF_EFLAGS(pEFlags);
7930 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
7931
7932 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7933 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
7934 IEM_MC_ADVANCE_RIP();
7935 IEM_MC_END();
7936 return VINF_SUCCESS;
7937
7938 case IEMMODE_64BIT:
7939 IEM_MC_BEGIN(3, 0);
7940 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7941 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
7942 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7943
7944 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7945 IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7946 IEM_MC_REF_EFLAGS(pEFlags);
7947 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
7948
7949 IEM_MC_ADVANCE_RIP();
7950 IEM_MC_END();
7951 return VINF_SUCCESS;
7952
7953 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7954 }
7955 }
7956 else
7957 {
7958 /*
7959 * We're accessing memory.
7960 */
7961 switch (pVCpu->iem.s.enmEffOpSize)
7962 {
7963 case IEMMODE_16BIT:
7964 IEM_MC_BEGIN(3, 3);
7965 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7966 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
7967 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7968 IEM_MC_LOCAL(uint16_t, u16RegCopy);
7969 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7970
7971 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7972 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7973 IEM_MC_FETCH_GREG_U16(u16RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7974 IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
7975 IEM_MC_FETCH_EFLAGS(EFlags);
7976 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7977 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
7978 else
7979 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);
7980
7981 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
7982 IEM_MC_COMMIT_EFLAGS(EFlags);
7983 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16RegCopy);
7984 IEM_MC_ADVANCE_RIP();
7985 IEM_MC_END();
7986 return VINF_SUCCESS;
7987
7988 case IEMMODE_32BIT:
7989 IEM_MC_BEGIN(3, 3);
7990 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7991 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
7992 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7993 IEM_MC_LOCAL(uint32_t, u32RegCopy);
7994 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7995
7996 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7997 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7998 IEM_MC_FETCH_GREG_U32(u32RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7999 IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
8000 IEM_MC_FETCH_EFLAGS(EFlags);
8001 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8002 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
8003 else
8004 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);
8005
8006 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
8007 IEM_MC_COMMIT_EFLAGS(EFlags);
8008 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32RegCopy);
8009 IEM_MC_ADVANCE_RIP();
8010 IEM_MC_END();
8011 return VINF_SUCCESS;
8012
8013 case IEMMODE_64BIT:
8014 IEM_MC_BEGIN(3, 3);
8015 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8016 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
8017 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8018 IEM_MC_LOCAL(uint64_t, u64RegCopy);
8019 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8020
8021 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8022 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8023 IEM_MC_FETCH_GREG_U64(u64RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8024 IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
8025 IEM_MC_FETCH_EFLAGS(EFlags);
8026 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8027 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
8028 else
8029 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);
8030
8031 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
8032 IEM_MC_COMMIT_EFLAGS(EFlags);
8033 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64RegCopy);
8034 IEM_MC_ADVANCE_RIP();
8035 IEM_MC_END();
8036 return VINF_SUCCESS;
8037
8038 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8039 }
8040 }
8041}
8042
8043
8044/** Opcode 0x0f 0xc2 - cmpps Vps,Wps,Ib */
8045FNIEMOP_STUB(iemOp_cmpps_Vps_Wps_Ib);
8046/** Opcode 0x66 0x0f 0xc2 - cmppd Vpd,Wpd,Ib */
8047FNIEMOP_STUB(iemOp_cmppd_Vpd_Wpd_Ib);
8048/** Opcode 0xf3 0x0f 0xc2 - cmpss Vss,Wss,Ib */
8049FNIEMOP_STUB(iemOp_cmpss_Vss_Wss_Ib);
8050/** Opcode 0xf2 0x0f 0xc2 - cmpsd Vsd,Wsd,Ib */
8051FNIEMOP_STUB(iemOp_cmpsd_Vsd_Wsd_Ib);
8052
8053
8054/** Opcode 0x0f 0xc3. */
8055FNIEMOP_DEF(iemOp_movnti_My_Gy)
8056{
8057 IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");
8058
8059 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8060
8061 /* Only the register -> memory form makes sense, assuming #UD for the other form. */
8062 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
8063 {
8064 switch (pVCpu->iem.s.enmEffOpSize)
8065 {
8066 case IEMMODE_32BIT:
8067 IEM_MC_BEGIN(0, 2);
8068 IEM_MC_LOCAL(uint32_t, u32Value);
8069 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8070
8071 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8072 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8073 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
8074 return IEMOP_RAISE_INVALID_OPCODE();
8075
8076 IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8077 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
8078 IEM_MC_ADVANCE_RIP();
8079 IEM_MC_END();
8080 break;
8081
8082 case IEMMODE_64BIT:
8083 IEM_MC_BEGIN(0, 2);
8084 IEM_MC_LOCAL(uint64_t, u64Value);
8085 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8086
8087 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8088 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8089 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
8090 return IEMOP_RAISE_INVALID_OPCODE();
8091
8092 IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8093 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
8094 IEM_MC_ADVANCE_RIP();
8095 IEM_MC_END();
8096 break;
8097
8098 case IEMMODE_16BIT:
8099 /** @todo check this form. */
8100 return IEMOP_RAISE_INVALID_OPCODE();
8101 }
8102 }
8103 else
8104 return IEMOP_RAISE_INVALID_OPCODE();
8105 return VINF_SUCCESS;
8106}
8107/* Opcode 0x66 0x0f 0xc3 - invalid */
8108/* Opcode 0xf3 0x0f 0xc3 - invalid */
8109/* Opcode 0xf2 0x0f 0xc3 - invalid */
8110
8111/** Opcode 0x0f 0xc4 - pinsrw Pq, Ry/Mw,Ib */
8112FNIEMOP_STUB(iemOp_pinsrw_Pq_RyMw_Ib);
8113/** Opcode 0x66 0x0f 0xc4 - pinsrw Vdq, Ry/Mw,Ib */
8114FNIEMOP_STUB(iemOp_pinsrw_Vdq_RyMw_Ib);
8115/* Opcode 0xf3 0x0f 0xc4 - invalid */
8116/* Opcode 0xf2 0x0f 0xc4 - invalid */
8117
8118/** Opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib */
8119FNIEMOP_STUB(iemOp_pextrw_Gd_Nq_Ib);
8120/** Opcode 0x66 0x0f 0xc5 - pextrw Gd, Udq, Ib */
8121FNIEMOP_STUB(iemOp_pextrw_Gd_Udq_Ib);
8122/* Opcode 0xf3 0x0f 0xc5 - invalid */
8123/* Opcode 0xf2 0x0f 0xc5 - invalid */
8124
8125/** Opcode 0x0f 0xc6 - shufps Vps, Wps, Ib */
8126FNIEMOP_STUB(iemOp_shufps_Vps_Wps_Ib);
8127/** Opcode 0x66 0x0f 0xc6 - shufpd Vpd, Wpd, Ib */
8128FNIEMOP_STUB(iemOp_shufpd_Vpd_Wpd_Ib);
8129/* Opcode 0xf3 0x0f 0xc6 - invalid */
8130/* Opcode 0xf2 0x0f 0xc6 - invalid */
8131
8132
8133/** Opcode 0x0f 0xc7 !11/1. */
8134FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
8135{
8136 IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");
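 /* cmpxchg8b compares EDX:EAX with the 64-bit memory operand: if equal, ZF
    is set and ECX:EBX is written to memory; otherwise ZF is cleared and the
    memory value is loaded into EDX:EAX (see the ZF check below). */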
8137
8138 IEM_MC_BEGIN(4, 3);
8139 IEM_MC_ARG(uint64_t *, pu64MemDst, 0);
8140 IEM_MC_ARG(PRTUINT64U, pu64EaxEdx, 1);
8141 IEM_MC_ARG(PRTUINT64U, pu64EbxEcx, 2);
8142 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
8143 IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
8144 IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
8145 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8146
8147 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8148 IEMOP_HLP_DONE_DECODING();
8149 IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8150
8151 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
8152 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
8153 IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);
8154
8155 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
8156 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
8157 IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);
8158
8159 IEM_MC_FETCH_EFLAGS(EFlags);
8160 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8161 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
8162 else
8163 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
8164
8165 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
8166 IEM_MC_COMMIT_EFLAGS(EFlags);
8167 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
8168 /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */
8169 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
8170 IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
8171 IEM_MC_ENDIF();
8172 IEM_MC_ADVANCE_RIP();
8173
8174 IEM_MC_END();
8175 return VINF_SUCCESS;
8176}
8177
8178
8179/** Opcode REX.W 0x0f 0xc7 !11/1. */
8180FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
8181{
8182 IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
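 /* Same protocol as cmpxchg8b, but on RDX:RAX / RCX:RBX and a 16-byte
    memory operand that must be 16-byte aligned; the unaligned case raises
    #GP(0) (see the alignment check below). */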
8183 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
8184 {
8185#if 0
8186 RT_NOREF(bRm);
8187 IEMOP_BITCH_ABOUT_STUB();
8188 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
8189#else
8190 IEM_MC_BEGIN(4, 3);
8191 IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0);
8192 IEM_MC_ARG(PRTUINT128U, pu128RaxRdx, 1);
8193 IEM_MC_ARG(PRTUINT128U, pu128RbxRcx, 2);
8194 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
8195 IEM_MC_LOCAL(RTUINT128U, u128RaxRdx);
8196 IEM_MC_LOCAL(RTUINT128U, u128RbxRcx);
8197 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8198
8199 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8200 IEMOP_HLP_DONE_DECODING();
8201 IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16);
8202 IEM_MC_MEM_MAP(pu128MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8203
8204 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Lo, X86_GREG_xAX);
8205 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Hi, X86_GREG_xDX);
8206 IEM_MC_REF_LOCAL(pu128RaxRdx, u128RaxRdx);
8207
8208 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Lo, X86_GREG_xBX);
8209 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Hi, X86_GREG_xCX);
8210 IEM_MC_REF_LOCAL(pu128RbxRcx, u128RbxRcx);
8211
8212 IEM_MC_FETCH_EFLAGS(EFlags);
8213# ifdef RT_ARCH_AMD64
8214 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
8215 {
8216 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8217 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
8218 else
8219 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
8220 }
8221 else
8222# endif
8223 {
8224 /* Note! The fallback for 32-bit systems and systems without CX16 is
8225 multiple accesses that are not all atomic, which works fine in a
8226 uni-CPU guest configuration (ignoring DMA). If guest SMP is active
8227 we have no choice but to use a rendezvous callback here. Sigh. */
8228 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
8229 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
8230 else
8231 {
8232 IEM_MC_CALL_CIMPL_4(iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
8233 /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
8234 }
8235 }
8236
8237 IEM_MC_MEM_COMMIT_AND_UNMAP(pu128MemDst, IEM_ACCESS_DATA_RW);
8238 IEM_MC_COMMIT_EFLAGS(EFlags);
8239 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
8240 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u128RaxRdx.s.Lo);
8241 IEM_MC_STORE_GREG_U64(X86_GREG_xDX, u128RaxRdx.s.Hi);
8242 IEM_MC_ENDIF();
8243 IEM_MC_ADVANCE_RIP();
8244
8245 IEM_MC_END();
8246 return VINF_SUCCESS;
8247#endif
8248 }
8249 Log(("cmpxchg16b -> #UD\n"));
8250 return IEMOP_RAISE_INVALID_OPCODE();
8251}
8252
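/** Opcode 0x0f 0xc7 !11/1 - selects cmpxchg16b when REX.W is present, cmpxchg8b otherwise. */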
8253FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8bOr16b, uint8_t, bRm)
8254{
8255 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
8256 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
8257 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
8258}
8259
8260/** Opcode 0x0f 0xc7 11/6. */
8261FNIEMOP_UD_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm);
8262
8263/** Opcode 0x0f 0xc7 !11/6. */
8264FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);
8265
8266/** Opcode 0x66 0x0f 0xc7 !11/6. */
8267FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);
8268
8269/** Opcode 0xf3 0x0f 0xc7 !11/6. */
8270FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);
8271
8272/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
8273FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
8274
8275/** Opcode 0x0f 0xc7 11/7. */
8276FNIEMOP_UD_STUB_1(iemOp_Grp9_rdseed_Rv, uint8_t, bRm);
8277
8278
8279/**
8280 * Group 9 jump table for register variant.
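 * Indexed by modrm.reg * 4 + the active prefix index (none, 066h, 0f3h,
 * 0f2h), matching the lookup in iemOp_Grp9 below.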
8281 */
8282IEM_STATIC const PFNIEMOPRM g_apfnGroup9RegReg[] =
8283{ /* pfx: none, 066h, 0f3h, 0f2h */
8284 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
8285 /* /1 */ IEMOP_X4(iemOp_InvalidWithRM),
8286 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
8287 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
8288 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
8289 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
8290 /* /6 */ iemOp_Grp9_rdrand_Rv, iemOp_Grp9_rdrand_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8291 /* /7 */ iemOp_Grp9_rdseed_Rv, iemOp_Grp9_rdseed_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8292};
8293AssertCompile(RT_ELEMENTS(g_apfnGroup9RegReg) == 8*4);
8294
8295
8296/**
8297 * Group 9 jump table for memory variant.
8298 */
8299IEM_STATIC const PFNIEMOPRM g_apfnGroup9MemReg[] =
8300{ /* pfx: none, 066h, 0f3h, 0f2h */
8301 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
8302 /* /1 */ iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, /* see bs3-cpu-decoding-1 */
8303 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
8304 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
8305 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
8306 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
8307 /* /6 */ iemOp_Grp9_vmptrld_Mq, iemOp_Grp9_vmclear_Mq, iemOp_Grp9_vmxon_Mq, iemOp_InvalidWithRM,
8308 /* /7 */ iemOp_Grp9_vmptrst_Mq, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8309};
8310AssertCompile(RT_ELEMENTS(g_apfnGroup9MemReg) == 8*4);
8311
8312
8313/** Opcode 0x0f 0xc7. */
8314FNIEMOP_DEF(iemOp_Grp9)
8315{
8316 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8317 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8318 /* register, register */
8319 return FNIEMOP_CALL_1(g_apfnGroup9RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
8320 + pVCpu->iem.s.idxPrefix], bRm);
8321 /* memory, register */
8322 return FNIEMOP_CALL_1(g_apfnGroup9MemReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
8323 + pVCpu->iem.s.idxPrefix], bRm);
8324}
8325
8326
8327/**
8328 * Common 'bswap register' helper.
8329 */
8330FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
8331{
8332 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
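 /* Note: the architecture leaves the result of a 16-bit bswap undefined;
    using the u16 helper below is this emulation's choice (assumption: many
    CPUs simply zero the low word in that case). */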
8333 switch (pVCpu->iem.s.enmEffOpSize)
8334 {
8335 case IEMMODE_16BIT:
8336 IEM_MC_BEGIN(1, 0);
8337 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8338 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
8339 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
8340 IEM_MC_ADVANCE_RIP();
8341 IEM_MC_END();
8342 return VINF_SUCCESS;
8343
8344 case IEMMODE_32BIT:
8345 IEM_MC_BEGIN(1, 0);
8346 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8347 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
8348 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
8349 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
8350 IEM_MC_ADVANCE_RIP();
8351 IEM_MC_END();
8352 return VINF_SUCCESS;
8353
8354 case IEMMODE_64BIT:
8355 IEM_MC_BEGIN(1, 0);
8356 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8357 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
8358 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
8359 IEM_MC_ADVANCE_RIP();
8360 IEM_MC_END();
8361 return VINF_SUCCESS;
8362
8363 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8364 }
8365}
8366
8367
8368/** Opcode 0x0f 0xc8. */
8369FNIEMOP_DEF(iemOp_bswap_rAX_r8)
8370{
8371 IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
8372 /* Note! The Intel manuals state that R8-R15 can be accessed by using a
8373 REX.X prefix, but REX.B appears to be the correct prefix. For a
8374 parallel case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
8375 IEMOP_HLP_MIN_486();
8376 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
8377}
8378
8379
8380/** Opcode 0x0f 0xc9. */
8381FNIEMOP_DEF(iemOp_bswap_rCX_r9)
8382{
8383 IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
8384 IEMOP_HLP_MIN_486();
8385 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
8386}
8387
8388
8389/** Opcode 0x0f 0xca. */
8390FNIEMOP_DEF(iemOp_bswap_rDX_r10)
8391{
8392 IEMOP_MNEMONIC(bswap_rDX_r10, "bswap rDX/r10");
8393 IEMOP_HLP_MIN_486();
8394 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
8395}
8396
8397
8398/** Opcode 0x0f 0xcb. */
8399FNIEMOP_DEF(iemOp_bswap_rBX_r11)
8400{
8401 IEMOP_MNEMONIC(bswap_rBX_r11, "bswap rBX/r11");
8402 IEMOP_HLP_MIN_486();
8403 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
8404}
8405
8406
8407/** Opcode 0x0f 0xcc. */
8408FNIEMOP_DEF(iemOp_bswap_rSP_r12)
8409{
8410 IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
8411 IEMOP_HLP_MIN_486();
8412 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
8413}
8414
8415
8416/** Opcode 0x0f 0xcd. */
8417FNIEMOP_DEF(iemOp_bswap_rBP_r13)
8418{
8419 IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
8420 IEMOP_HLP_MIN_486();
8421 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
8422}
8423
8424
8425/** Opcode 0x0f 0xce. */
8426FNIEMOP_DEF(iemOp_bswap_rSI_r14)
8427{
8428 IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
8429 IEMOP_HLP_MIN_486();
8430 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
8431}
8432
8433
8434/** Opcode 0x0f 0xcf. */
8435FNIEMOP_DEF(iemOp_bswap_rDI_r15)
8436{
8437 IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
8438 IEMOP_HLP_MIN_486();
8439 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
8440}
8441
8442
8443/* Opcode 0x0f 0xd0 - invalid */
8444/** Opcode 0x66 0x0f 0xd0 - addsubpd Vpd, Wpd */
8445FNIEMOP_STUB(iemOp_addsubpd_Vpd_Wpd);
8446/* Opcode 0xf3 0x0f 0xd0 - invalid */
8447/** Opcode 0xf2 0x0f 0xd0 - addsubps Vps, Wps */
8448FNIEMOP_STUB(iemOp_addsubps_Vps_Wps);
8449
8450/** Opcode 0x0f 0xd1 - psrlw Pq, Qq */
8451FNIEMOP_STUB(iemOp_psrlw_Pq_Qq);
8452/** Opcode 0x66 0x0f 0xd1 - psrlw Vx, W */
8453FNIEMOP_STUB(iemOp_psrlw_Vx_W);
8454/* Opcode 0xf3 0x0f 0xd1 - invalid */
8455/* Opcode 0xf2 0x0f 0xd1 - invalid */
8456
8457/** Opcode 0x0f 0xd2 - psrld Pq, Qq */
8458FNIEMOP_STUB(iemOp_psrld_Pq_Qq);
8459/** Opcode 0x66 0x0f 0xd2 - psrld Vx, Wx */
8460FNIEMOP_STUB(iemOp_psrld_Vx_Wx);
8461/* Opcode 0xf3 0x0f 0xd2 - invalid */
8462/* Opcode 0xf2 0x0f 0xd2 - invalid */
8463
8464/** Opcode 0x0f 0xd3 - psrlq Pq, Qq */
8465FNIEMOP_STUB(iemOp_psrlq_Pq_Qq);
8466/** Opcode 0x66 0x0f 0xd3 - psrlq Vx, Wx */
8467FNIEMOP_STUB(iemOp_psrlq_Vx_Wx);
8468/* Opcode 0xf3 0x0f 0xd3 - invalid */
8469/* Opcode 0xf2 0x0f 0xd3 - invalid */
8470
8471/** Opcode 0x0f 0xd4 - paddq Pq, Qq */
8472FNIEMOP_STUB(iemOp_paddq_Pq_Qq);
8473/** Opcode 0x66 0x0f 0xd4 - paddq Vx, W */
8474FNIEMOP_STUB(iemOp_paddq_Vx_W);
8475/* Opcode 0xf3 0x0f 0xd4 - invalid */
8476/* Opcode 0xf2 0x0f 0xd4 - invalid */
8477
8478/** Opcode 0x0f 0xd5 - pmullw Pq, Qq */
8479FNIEMOP_STUB(iemOp_pmullw_Pq_Qq);
8480/** Opcode 0x66 0x0f 0xd5 - pmullw Vx, Wx */
8481FNIEMOP_STUB(iemOp_pmullw_Vx_Wx);
8482/* Opcode 0xf3 0x0f 0xd5 - invalid */
8483/* Opcode 0xf2 0x0f 0xd5 - invalid */
8484
8485/* Opcode 0x0f 0xd6 - invalid */
8486
8487/**
8488 * @opcode 0xd6
8489 * @oppfx 0x66
8490 * @opcpuid sse2
8491 * @opgroup og_sse2_pcksclr_datamove
8492 * @opxcpttype none
8493 * @optest op1=-1 op2=2 -> op1=2
8494 * @optest op1=0 op2=-42 -> op1=-42
8495 */
8496FNIEMOP_DEF(iemOp_movq_Wq_Vq)
8497{
8498 IEMOP_MNEMONIC2(MR, MOVQ, movq, WqZxReg_WO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
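 /* Stores the low quadword of the source XMM register; in the register
    form the destination's high quadword is zeroed (hence WqZxReg above). */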
8499 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8500 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8501 {
8502 /*
8503 * Register, register.
8504 */
8505 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8506 IEM_MC_BEGIN(0, 2);
8507 IEM_MC_LOCAL(uint64_t, uSrc);
8508
8509 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8510 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
8511
8512 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8513 IEM_MC_STORE_XREG_U64_ZX_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
8514
8515 IEM_MC_ADVANCE_RIP();
8516 IEM_MC_END();
8517 }
8518 else
8519 {
8520 /*
8521 * Memory, register.
8522 */
8523 IEM_MC_BEGIN(0, 2);
8524 IEM_MC_LOCAL(uint64_t, uSrc);
8525 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8526
8527 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8528 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8529 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8530 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
8531
8532 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8533 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
8534
8535 IEM_MC_ADVANCE_RIP();
8536 IEM_MC_END();
8537 }
8538 return VINF_SUCCESS;
8539}
8540
8541
8542/**
8543 * @opcode 0xd6
8544 * @opcodesub 11 mr/reg
8545 * @oppfx f3
8546 * @opcpuid sse2
8547 * @opgroup og_sse2_simdint_datamove
8548 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
8549 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
8550 */
8551FNIEMOP_DEF(iemOp_movq2dq_Vdq_Nq)
8552{
8553 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8554 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8555 {
8556 /*
8557 * Register, register.
8558 */
8559 IEMOP_MNEMONIC2(RM_REG, MOVQ2DQ, movq2dq, VqZx_WO, Nq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
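 /* Moves the MMX register into the low quadword of the XMM register,
    zeroing the high quadword, and leaves the FPU in MMX mode (the
    ftw=0xff in the tests above). */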
8560 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8561 IEM_MC_BEGIN(0, 1);
8562 IEM_MC_LOCAL(uint64_t, uSrc);
8563
8564 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8565 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
8566
8567 IEM_MC_FETCH_MREG_U64(uSrc, bRm & X86_MODRM_RM_MASK);
8568 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
8569 IEM_MC_FPU_TO_MMX_MODE();
8570
8571 IEM_MC_ADVANCE_RIP();
8572 IEM_MC_END();
8573 return VINF_SUCCESS;
8574 }
8575
8576 /**
8577 * @opdone
8578 * @opmnemonic udf30fd6mem
8579 * @opcode 0xd6
8580 * @opcodesub !11 mr/reg
8581 * @oppfx f3
8582 * @opunused intel-modrm
8583 * @opcpuid sse
8584 * @optest ->
8585 */
8586 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
8587}
8588
8589
8590/**
8591 * @opcode 0xd6
8592 * @opcodesub 11 mr/reg
8593 * @oppfx f2
8594 * @opcpuid sse2
8595 * @opgroup og_sse2_simdint_datamove
8596 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
8597 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
8598 * @optest op1=0 op2=0x1123456789abcdef -> op1=0x1123456789abcdef ftw=0xff
8599 * @optest op1=0 op2=0xfedcba9876543210 -> op1=0xfedcba9876543210 ftw=0xff
8600 * @optest op1=-42 op2=0xfedcba9876543210
8601 * -> op1=0xfedcba9876543210 ftw=0xff
8602 */
8603FNIEMOP_DEF(iemOp_movdq2q_Pq_Uq)
8604{
8605 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8606 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8607 {
8608 /*
8609 * Register, register.
8610 */
8611 IEMOP_MNEMONIC2(RM_REG, MOVDQ2Q, movdq2q, Pq_WO, Uq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
8612 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8613 IEM_MC_BEGIN(0, 1);
8614 IEM_MC_LOCAL(uint64_t, uSrc);
8615
8616 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8617 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
8618
8619 IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8620 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, uSrc);
8621 IEM_MC_FPU_TO_MMX_MODE();
8622
8623 IEM_MC_ADVANCE_RIP();
8624 IEM_MC_END();
8625 return VINF_SUCCESS;
8626 }
8627
8628 /**
8629 * @opdone
8630 * @opmnemonic udf20fd6mem
8631 * @opcode 0xd6
8632 * @opcodesub !11 mr/reg
8633 * @oppfx f2
8634 * @opunused intel-modrm
8635 * @opcpuid sse
8636 * @optest ->
8637 */
8638 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
8639}
8640
8641/** Opcode 0x0f 0xd7 - pmovmskb Gd, Nq */
8642FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq)
8643{
8644 /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
8645 /** @todo testcase: Check that the instruction implicitly clears the high
8646 * bits in 64-bit mode. The REX.W is first necessary when VLMAX > 256
8647 * and opcode modifications are made to work with the whole width (not
8648 * just 128). */
8649 IEMOP_MNEMONIC(pmovmskb_Gd_Nq, "pmovmskb Gd,Nq");
8650 /* Docs say register only. */
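 /* pmovmskb gathers the most significant bit of each source byte into the
    low bits of the destination GPR (8 mask bits from the MMX source here). */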
8651 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8652 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
8653 {
8654 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
8655 IEM_MC_BEGIN(2, 0);
8656 IEM_MC_ARG(uint64_t *, pDst, 0);
8657 IEM_MC_ARG(uint64_t const *, pSrc, 1);
8658 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
8659 IEM_MC_PREPARE_FPU_USAGE();
8660 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
8661 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
8662 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
8663 IEM_MC_ADVANCE_RIP();
8664 IEM_MC_END();
8665 return VINF_SUCCESS;
8666 }
8667 return IEMOP_RAISE_INVALID_OPCODE();
8668}
8669
8670/** Opcode 0x66 0x0f 0xd7 - */
8671FNIEMOP_DEF(iemOp_pmovmskb_Gd_Ux)
8672{
8673 /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
8674 /** @todo testcase: Check that the instruction implicitly clears the high
8675 * bits in 64-bit mode. The REX.W is first necessary when VLMAX > 256
8676 * and opcode modifications are made to work with the whole width (not
8677 * just 128). */
8678 IEMOP_MNEMONIC(pmovmskb_Gd_Ux, "pmovmskb Gd,Ux");
8679 /* Docs say register only. */
8680 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8681 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
8682 {
8683 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
8684 IEM_MC_BEGIN(2, 0);
8685 IEM_MC_ARG(uint64_t *, pDst, 0);
8686 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
8687 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8688 IEM_MC_PREPARE_SSE_USAGE();
8689 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8690 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8691 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
8692 IEM_MC_ADVANCE_RIP();
8693 IEM_MC_END();
8694 return VINF_SUCCESS;
8695 }
8696 return IEMOP_RAISE_INVALID_OPCODE();
8697}
8698
8699/* Opcode 0xf3 0x0f 0xd7 - invalid */
8700/* Opcode 0xf2 0x0f 0xd7 - invalid */
8701
8702
8703/** Opcode 0x0f 0xd8 - psubusb Pq, Qq */
8704FNIEMOP_STUB(iemOp_psubusb_Pq_Qq);
8705/** Opcode 0x66 0x0f 0xd8 - psubusb Vx, W */
8706FNIEMOP_STUB(iemOp_psubusb_Vx_W);
8707/* Opcode 0xf3 0x0f 0xd8 - invalid */
8708/* Opcode 0xf2 0x0f 0xd8 - invalid */
8709
8710/** Opcode 0x0f 0xd9 - psubusw Pq, Qq */
8711FNIEMOP_STUB(iemOp_psubusw_Pq_Qq);
8712/** Opcode 0x66 0x0f 0xd9 - psubusw Vx, Wx */
8713FNIEMOP_STUB(iemOp_psubusw_Vx_Wx);
8714/* Opcode 0xf3 0x0f 0xd9 - invalid */
8715/* Opcode 0xf2 0x0f 0xd9 - invalid */
8716
8717/** Opcode 0x0f 0xda - pminub Pq, Qq */
8718FNIEMOP_STUB(iemOp_pminub_Pq_Qq);
8719/** Opcode 0x66 0x0f 0xda - pminub Vx, Wx */
8720FNIEMOP_STUB(iemOp_pminub_Vx_Wx);
8721/* Opcode 0xf3 0x0f 0xda - invalid */
8722/* Opcode 0xf2 0x0f 0xda - invalid */
8723
8724/** Opcode 0x0f 0xdb - pand Pq, Qq */
8725FNIEMOP_STUB(iemOp_pand_Pq_Qq);
8726/** Opcode 0x66 0x0f 0xdb - pand Vx, W */
8727FNIEMOP_STUB(iemOp_pand_Vx_W);
8728/* Opcode 0xf3 0x0f 0xdb - invalid */
8729/* Opcode 0xf2 0x0f 0xdb - invalid */
8730
8731/** Opcode 0x0f 0xdc - paddusb Pq, Qq */
8732FNIEMOP_STUB(iemOp_paddusb_Pq_Qq);
8733/** Opcode 0x66 0x0f 0xdc - paddusb Vx, Wx */
8734FNIEMOP_STUB(iemOp_paddusb_Vx_Wx);
8735/* Opcode 0xf3 0x0f 0xdc - invalid */
8736/* Opcode 0xf2 0x0f 0xdc - invalid */
8737
8738/** Opcode 0x0f 0xdd - paddusw Pq, Qq */
8739FNIEMOP_STUB(iemOp_paddusw_Pq_Qq);
8740/** Opcode 0x66 0x0f 0xdd - paddusw Vx, Wx */
8741FNIEMOP_STUB(iemOp_paddusw_Vx_Wx);
8742/* Opcode 0xf3 0x0f 0xdd - invalid */
8743/* Opcode 0xf2 0x0f 0xdd - invalid */
8744
8745/** Opcode 0x0f 0xde - pmaxub Pq, Qq */
8746FNIEMOP_STUB(iemOp_pmaxub_Pq_Qq);
8747/** Opcode 0x66 0x0f 0xde - pmaxub Vx, W */
8748FNIEMOP_STUB(iemOp_pmaxub_Vx_W);
8749/* Opcode 0xf3 0x0f 0xde - invalid */
8750/* Opcode 0xf2 0x0f 0xde - invalid */
8751
8752/** Opcode 0x0f 0xdf - pandn Pq, Qq */
8753FNIEMOP_STUB(iemOp_pandn_Pq_Qq);
8754/** Opcode 0x66 0x0f 0xdf - pandn Vx, Wx */
8755FNIEMOP_STUB(iemOp_pandn_Vx_Wx);
8756/* Opcode 0xf3 0x0f 0xdf - invalid */
8757/* Opcode 0xf2 0x0f 0xdf - invalid */
8758
8759/** Opcode 0x0f 0xe0 - pavgb Pq, Qq */
8760FNIEMOP_STUB(iemOp_pavgb_Pq_Qq);
8761/** Opcode 0x66 0x0f 0xe0 - pavgb Vx, Wx */
8762FNIEMOP_STUB(iemOp_pavgb_Vx_Wx);
8763/* Opcode 0xf3 0x0f 0xe0 - invalid */
8764/* Opcode 0xf2 0x0f 0xe0 - invalid */
8765
8766/** Opcode 0x0f 0xe1 - psraw Pq, Qq */
8767FNIEMOP_STUB(iemOp_psraw_Pq_Qq);
8768/** Opcode 0x66 0x0f 0xe1 - psraw Vx, W */
8769FNIEMOP_STUB(iemOp_psraw_Vx_W);
8770/* Opcode 0xf3 0x0f 0xe1 - invalid */
8771/* Opcode 0xf2 0x0f 0xe1 - invalid */
8772
8773/** Opcode 0x0f 0xe2 - psrad Pq, Qq */
8774FNIEMOP_STUB(iemOp_psrad_Pq_Qq);
8775/** Opcode 0x66 0x0f 0xe2 - psrad Vx, Wx */
8776FNIEMOP_STUB(iemOp_psrad_Vx_Wx);
8777/* Opcode 0xf3 0x0f 0xe2 - invalid */
8778/* Opcode 0xf2 0x0f 0xe2 - invalid */
8779
8780/** Opcode 0x0f 0xe3 - pavgw Pq, Qq */
8781FNIEMOP_STUB(iemOp_pavgw_Pq_Qq);
8782/** Opcode 0x66 0x0f 0xe3 - pavgw Vx, Wx */
8783FNIEMOP_STUB(iemOp_pavgw_Vx_Wx);
8784/* Opcode 0xf3 0x0f 0xe3 - invalid */
8785/* Opcode 0xf2 0x0f 0xe3 - invalid */
8786
8787/** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq */
8788FNIEMOP_STUB(iemOp_pmulhuw_Pq_Qq);
8789/** Opcode 0x66 0x0f 0xe4 - pmulhuw Vx, W */
8790FNIEMOP_STUB(iemOp_pmulhuw_Vx_W);
8791/* Opcode 0xf3 0x0f 0xe4 - invalid */
8792/* Opcode 0xf2 0x0f 0xe4 - invalid */
8793
8794/** Opcode 0x0f 0xe5 - pmulhw Pq, Qq */
8795FNIEMOP_STUB(iemOp_pmulhw_Pq_Qq);
8796/** Opcode 0x66 0x0f 0xe5 - pmulhw Vx, Wx */
8797FNIEMOP_STUB(iemOp_pmulhw_Vx_Wx);
8798/* Opcode 0xf3 0x0f 0xe5 - invalid */
8799/* Opcode 0xf2 0x0f 0xe5 - invalid */
8800
8801/* Opcode 0x0f 0xe6 - invalid */
8802/** Opcode 0x66 0x0f 0xe6 - cvttpd2dq Vx, Wpd */
8803FNIEMOP_STUB(iemOp_cvttpd2dq_Vx_Wpd);
8804/** Opcode 0xf3 0x0f 0xe6 - cvtdq2pd Vx, Wpd */
8805FNIEMOP_STUB(iemOp_cvtdq2pd_Vx_Wpd);
8806/** Opcode 0xf2 0x0f 0xe6 - cvtpd2dq Vx, Wpd */
8807FNIEMOP_STUB(iemOp_cvtpd2dq_Vx_Wpd);
8808
8809
8810/** Opcode 0x0f 0xe7 - movntq Mq, Pq */
8811FNIEMOP_DEF(iemOp_movntq_Mq_Pq)
8812{
8813 IEMOP_MNEMONIC(movntq_Mq_Pq, "movntq Mq,Pq");
8814 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8815 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
8816 {
8817 /* Register, memory. */
8818 IEM_MC_BEGIN(0, 2);
8819 IEM_MC_LOCAL(uint64_t, uSrc);
8820 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8821
8822 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8823 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8824 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
8825 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
8826
8827 IEM_MC_FETCH_MREG_U64(uSrc, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
8828 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
8829
8830 IEM_MC_ADVANCE_RIP();
8831 IEM_MC_END();
8832 return VINF_SUCCESS;
8833 }
8834 /* The register, register encoding is invalid. */
8835 return IEMOP_RAISE_INVALID_OPCODE();
8836}
8837
8838/** Opcode 0x66 0x0f 0xe7 - movntdq Mx, Vx */
8839FNIEMOP_DEF(iemOp_movntdq_Mx_Vx)
8840{
8841 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8842 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
8843 {
8844 /* Register, memory. */
8845 IEMOP_MNEMONIC(movntdq_Mx_Vx, "movntdq Mx,Vx");
8846 IEM_MC_BEGIN(0, 2);
8847 IEM_MC_LOCAL(RTUINT128U, uSrc);
8848 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8849
8850 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8851 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8852 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
8853 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
8854
8855 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
8856 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
8857
8858 IEM_MC_ADVANCE_RIP();
8859 IEM_MC_END();
8860 return VINF_SUCCESS;
8861 }
8862
8863 /* The register, register encoding is invalid. */
8864 return IEMOP_RAISE_INVALID_OPCODE();
8865}
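
/*
 * Illustrative sketch only: IEM_MC_STORE_MEM_U128_ALIGN_SSE above enforces
 * the 16-byte alignment that movntdq requires.  Conceptually (hypothetical
 * helper; the real check is part of the MC block expansion) it amounts to:
 */
#if 0
DECLINLINE(bool) iemMovntdqIsAddrAlignedSketch(RTGCPTR GCPtrEff)
{
    return !(GCPtrEff & 15); /* A misaligned store raises #GP(0). */
}
#endif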
8866
8867/* Opcode 0xf3 0x0f 0xe7 - invalid */
8868/* Opcode 0xf2 0x0f 0xe7 - invalid */
8869
8870
8871/** Opcode 0x0f 0xe8 - psubsb Pq, Qq */
8872FNIEMOP_STUB(iemOp_psubsb_Pq_Qq);
8873/** Opcode 0x66 0x0f 0xe8 - psubsb Vx, W */
8874FNIEMOP_STUB(iemOp_psubsb_Vx_W);
8875/* Opcode 0xf3 0x0f 0xe8 - invalid */
8876/* Opcode 0xf2 0x0f 0xe8 - invalid */
8877
8878/** Opcode 0x0f 0xe9 - psubsw Pq, Qq */
8879FNIEMOP_STUB(iemOp_psubsw_Pq_Qq);
8880/** Opcode 0x66 0x0f 0xe9 - psubsw Vx, Wx */
8881FNIEMOP_STUB(iemOp_psubsw_Vx_Wx);
8882/* Opcode 0xf3 0x0f 0xe9 - invalid */
8883/* Opcode 0xf2 0x0f 0xe9 - invalid */
8884
8885/** Opcode 0x0f 0xea - pminsw Pq, Qq */
8886FNIEMOP_STUB(iemOp_pminsw_Pq_Qq);
8887/** Opcode 0x66 0x0f 0xea - pminsw Vx, Wx */
8888FNIEMOP_STUB(iemOp_pminsw_Vx_Wx);
8889/* Opcode 0xf3 0x0f 0xea - invalid */
8890/* Opcode 0xf2 0x0f 0xea - invalid */
8891
8892/** Opcode 0x0f 0xeb - por Pq, Qq */
8893FNIEMOP_STUB(iemOp_por_Pq_Qq);
8894/** Opcode 0x66 0x0f 0xeb - por Vx, W */
8895FNIEMOP_STUB(iemOp_por_Vx_W);
8896/* Opcode 0xf3 0x0f 0xeb - invalid */
8897/* Opcode 0xf2 0x0f 0xeb - invalid */
8898
8899/** Opcode 0x0f 0xec - paddsb Pq, Qq */
8900FNIEMOP_STUB(iemOp_paddsb_Pq_Qq);
8901/** Opcode 0x66 0x0f 0xec - paddsb Vx, Wx */
8902FNIEMOP_STUB(iemOp_paddsb_Vx_Wx);
8903/* Opcode 0xf3 0x0f 0xec - invalid */
8904/* Opcode 0xf2 0x0f 0xec - invalid */
8905
8906/** Opcode 0x0f 0xed - paddsw Pq, Qq */
8907FNIEMOP_STUB(iemOp_paddsw_Pq_Qq);
8908/** Opcode 0x66 0x0f 0xed - paddsw Vx, Wx */
8909FNIEMOP_STUB(iemOp_paddsw_Vx_Wx);
8910/* Opcode 0xf3 0x0f 0xed - invalid */
8911/* Opcode 0xf2 0x0f 0xed - invalid */
8912
8913/** Opcode 0x0f 0xee - pmaxsw Pq, Qq */
8914FNIEMOP_STUB(iemOp_pmaxsw_Pq_Qq);
8915/** Opcode 0x66 0x0f 0xee - pmaxsw Vx, W */
8916FNIEMOP_STUB(iemOp_pmaxsw_Vx_W);
8917/* Opcode 0xf3 0x0f 0xee - invalid */
8918/* Opcode 0xf2 0x0f 0xee - invalid */
8919
8920
8921/** Opcode 0x0f 0xef - pxor Pq, Qq */
8922FNIEMOP_DEF(iemOp_pxor_Pq_Qq)
8923{
8924 IEMOP_MNEMONIC(pxor, "pxor");
8925 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pxor);
8926}
8927
8928/** Opcode 0x66 0x0f 0xef - pxor Vx, Wx */
8929FNIEMOP_DEF(iemOp_pxor_Vx_Wx)
8930{
8931 IEMOP_MNEMONIC(pxor_Vx_Wx, "pxor");
8932 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pxor);
8933}
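
/*
 * Illustrative sketch only: the g_iemAImpl_pxor workers are implemented in
 * assembly.  A C equivalent of the 128-bit one (hypothetical helper) is just
 * a bitwise XOR of the two qword halves:
 */
#if 0
DECLINLINE(void) iemPxorU128Sketch(PRTUINT128U puDst, PCRTUINT128U puSrc)
{
    puDst->au64[0] ^= puSrc->au64[0]; /* Low qword. */
    puDst->au64[1] ^= puSrc->au64[1]; /* High qword. */
}
#endif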
8934
8935/* Opcode 0xf3 0x0f 0xef - invalid */
8936/* Opcode 0xf2 0x0f 0xef - invalid */
8937
8938/* Opcode 0x0f 0xf0 - invalid */
8939/* Opcode 0x66 0x0f 0xf0 - invalid */
8940/** Opcode 0xf2 0x0f 0xf0 - lddqu Vx, Mx */
8941FNIEMOP_STUB(iemOp_lddqu_Vx_Mx);
8942
8943/** Opcode 0x0f 0xf1 - psllw Pq, Qq */
8944FNIEMOP_STUB(iemOp_psllw_Pq_Qq);
8945/** Opcode 0x66 0x0f 0xf1 - psllw Vx, W */
8946FNIEMOP_STUB(iemOp_psllw_Vx_W);
8947/* Opcode 0xf2 0x0f 0xf1 - invalid */
8948
8949/** Opcode 0x0f 0xf2 - pslld Pq, Qq */
8950FNIEMOP_STUB(iemOp_pslld_Pq_Qq);
8951/** Opcode 0x66 0x0f 0xf2 - pslld Vx, Wx */
8952FNIEMOP_STUB(iemOp_pslld_Vx_Wx);
8953/* Opcode 0xf2 0x0f 0xf2 - invalid */
8954
8955/** Opcode 0x0f 0xf3 - psllq Pq, Qq */
8956FNIEMOP_STUB(iemOp_psllq_Pq_Qq);
8957/** Opcode 0x66 0x0f 0xf3 - psllq Vx, Wx */
8958FNIEMOP_STUB(iemOp_psllq_Vx_Wx);
8959/* Opcode 0xf2 0x0f 0xf3 - invalid */
8960
8961/** Opcode 0x0f 0xf4 - pmuludq Pq, Qq */
8962FNIEMOP_STUB(iemOp_pmuludq_Pq_Qq);
8963/** Opcode 0x66 0x0f 0xf4 - pmuludq Vx, W */
8964FNIEMOP_STUB(iemOp_pmuludq_Vx_W);
8965/* Opcode 0xf2 0x0f 0xf4 - invalid */
8966
8967/** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq */
8968FNIEMOP_STUB(iemOp_pmaddwd_Pq_Qq);
8969/** Opcode 0x66 0x0f 0xf5 - pmaddwd Vx, Wx */
8970FNIEMOP_STUB(iemOp_pmaddwd_Vx_Wx);
8971/* Opcode 0xf2 0x0f 0xf5 - invalid */
8972
8973/** Opcode 0x0f 0xf6 - psadbw Pq, Qq */
8974FNIEMOP_STUB(iemOp_psadbw_Pq_Qq);
8975/** Opcode 0x66 0x0f 0xf6 - psadbw Vx, Wx */
8976FNIEMOP_STUB(iemOp_psadbw_Vx_Wx);
8977/* Opcode 0xf2 0x0f 0xf6 - invalid */
8978
8979/** Opcode 0x0f 0xf7 - maskmovq Pq, Nq */
8980FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq);
8981/** Opcode 0x66 0x0f 0xf7 - maskmovdqu Vdq, Udq */
8982FNIEMOP_STUB(iemOp_maskmovdqu_Vdq_Udq);
8983/* Opcode 0xf2 0x0f 0xf7 - invalid */
8984
8985/** Opcode 0x0f 0xf8 - psubb Pq, Qq */
8986FNIEMOP_STUB(iemOp_psubb_Pq_Qq);
8987/** Opcode 0x66 0x0f 0xf8 - psubb Vx, W */
8988FNIEMOP_STUB(iemOp_psubb_Vx_W);
8989/* Opcode 0xf2 0x0f 0xf8 - invalid */
8990
8991/** Opcode 0x0f 0xf9 - psubw Pq, Qq */
8992FNIEMOP_STUB(iemOp_psubw_Pq_Qq);
8993/** Opcode 0x66 0x0f 0xf9 - psubw Vx, Wx */
8994FNIEMOP_STUB(iemOp_psubw_Vx_Wx);
8995/* Opcode 0xf2 0x0f 0xf9 - invalid */
8996
8997/** Opcode 0x0f 0xfa - psubd Pq, Qq */
8998FNIEMOP_STUB(iemOp_psubd_Pq_Qq);
8999/** Opcode 0x66 0x0f 0xfa - psubd Vx, Wx */
9000FNIEMOP_STUB(iemOp_psubd_Vx_Wx);
9001/* Opcode 0xf2 0x0f 0xfa - invalid */
9002
9003/** Opcode 0x0f 0xfb - psubq Pq, Qq */
9004FNIEMOP_STUB(iemOp_psubq_Pq_Qq);
9005/** Opcode 0x66 0x0f 0xfb - psubq Vx, W */
9006FNIEMOP_STUB(iemOp_psubq_Vx_W);
9007/* Opcode 0xf2 0x0f 0xfb - invalid */
9008
9009/** Opcode 0x0f 0xfc - paddb Pq, Qq */
9010FNIEMOP_STUB(iemOp_paddb_Pq_Qq);
9011/** Opcode 0x66 0x0f 0xfc - paddb Vx, Wx */
9012FNIEMOP_STUB(iemOp_paddb_Vx_Wx);
9013/* Opcode 0xf2 0x0f 0xfc - invalid */
9014
9015/** Opcode 0x0f 0xfd - paddw Pq, Qq */
9016FNIEMOP_STUB(iemOp_paddw_Pq_Qq);
9017/** Opcode 0x66 0x0f 0xfd - paddw Vx, Wx */
9018FNIEMOP_STUB(iemOp_paddw_Vx_Wx);
9019/* Opcode 0xf2 0x0f 0xfd - invalid */
9020
9021/** Opcode 0x0f 0xfe - paddd Pq, Qq */
9022FNIEMOP_STUB(iemOp_paddd_Pq_Qq);
9023/** Opcode 0x66 0x0f 0xfe - paddd Vx, W */
9024FNIEMOP_STUB(iemOp_paddd_Vx_W);
9025/* Opcode 0xf2 0x0f 0xfe - invalid */
9026
9027
9028/** Opcode 0x0f 0xff - UD0 */
9029FNIEMOP_DEF(iemOp_ud0)
9030{
9031 IEMOP_MNEMONIC(ud0, "ud0");
9032 if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
9033 {
9034 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
9035#ifndef TST_IEM_CHECK_MC
9036 RTGCPTR GCPtrEff;
9037 VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
9038 if (rcStrict != VINF_SUCCESS)
9039 return rcStrict;
9040#endif
9041 IEMOP_HLP_DONE_DECODING();
9042 }
9043 return IEMOP_RAISE_INVALID_OPCODE();
9044}
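
/*
 * Note the vendor difference modelled above: on Intel CPUs ud0 takes a
 * ModR/M byte (and any SIB/displacement it implies), so for example the
 * sequence 0f ff 84 24 00 00 00 00 decodes as a single 8-byte instruction,
 * whereas AMD CPUs treat 0f ff as a plain 2-byte invalid opcode.  Both paths
 * raise #UD; the difference is how many opcode bytes are consumed during
 * decoding.
 */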
9045
9046
9047
9048/**
9049 * Two byte opcode map, first byte 0x0f.
9050 *
9051 * @remarks The g_apfnVexMap1 table is currently a subset of this one, so please
9052 * check if it needs updating as well when making changes.
9053 */
9054IEM_STATIC const PFNIEMOP g_apfnTwoByteMap[] =
9055{
9056    /* no prefix, 066h prefix, f3h prefix, f2h prefix */
9057 /* 0x00 */ IEMOP_X4(iemOp_Grp6),
9058 /* 0x01 */ IEMOP_X4(iemOp_Grp7),
9059 /* 0x02 */ IEMOP_X4(iemOp_lar_Gv_Ew),
9060 /* 0x03 */ IEMOP_X4(iemOp_lsl_Gv_Ew),
9061 /* 0x04 */ IEMOP_X4(iemOp_Invalid),
9062 /* 0x05 */ IEMOP_X4(iemOp_syscall),
9063 /* 0x06 */ IEMOP_X4(iemOp_clts),
9064 /* 0x07 */ IEMOP_X4(iemOp_sysret),
9065 /* 0x08 */ IEMOP_X4(iemOp_invd),
9066 /* 0x09 */ IEMOP_X4(iemOp_wbinvd),
9067 /* 0x0a */ IEMOP_X4(iemOp_Invalid),
9068 /* 0x0b */ IEMOP_X4(iemOp_ud2),
9069 /* 0x0c */ IEMOP_X4(iemOp_Invalid),
9070 /* 0x0d */ IEMOP_X4(iemOp_nop_Ev_GrpP),
9071 /* 0x0e */ IEMOP_X4(iemOp_femms),
9072 /* 0x0f */ IEMOP_X4(iemOp_3Dnow),
9073
9074 /* 0x10 */ iemOp_movups_Vps_Wps, iemOp_movupd_Vpd_Wpd, iemOp_movss_Vss_Wss, iemOp_movsd_Vsd_Wsd,
9075 /* 0x11 */ iemOp_movups_Wps_Vps, iemOp_movupd_Wpd_Vpd, iemOp_movss_Wss_Vss, iemOp_movsd_Wsd_Vsd,
9076 /* 0x12 */ iemOp_movlps_Vq_Mq__movhlps, iemOp_movlpd_Vq_Mq, iemOp_movsldup_Vdq_Wdq, iemOp_movddup_Vdq_Wdq,
9077 /* 0x13 */ iemOp_movlps_Mq_Vq, iemOp_movlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9078 /* 0x14 */ iemOp_unpcklps_Vx_Wx, iemOp_unpcklpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9079 /* 0x15 */ iemOp_unpckhps_Vx_Wx, iemOp_unpckhpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9080 /* 0x16 */ iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq, iemOp_movhpd_Vdq_Mq, iemOp_movshdup_Vdq_Wdq, iemOp_InvalidNeedRM,
9081 /* 0x17 */ iemOp_movhps_Mq_Vq, iemOp_movhpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9082 /* 0x18 */ IEMOP_X4(iemOp_prefetch_Grp16),
9083 /* 0x19 */ IEMOP_X4(iemOp_nop_Ev),
9084 /* 0x1a */ IEMOP_X4(iemOp_nop_Ev),
9085 /* 0x1b */ IEMOP_X4(iemOp_nop_Ev),
9086 /* 0x1c */ IEMOP_X4(iemOp_nop_Ev),
9087 /* 0x1d */ IEMOP_X4(iemOp_nop_Ev),
9088 /* 0x1e */ IEMOP_X4(iemOp_nop_Ev),
9089 /* 0x1f */ IEMOP_X4(iemOp_nop_Ev),
9090
9091 /* 0x20 */ iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd,
9092 /* 0x21 */ iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd,
9093 /* 0x22 */ iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd,
9094 /* 0x23 */ iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd,
9095 /* 0x24 */ iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td,
9096 /* 0x25 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
9097 /* 0x26 */ iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd,
9098 /* 0x27 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
9099 /* 0x28 */ iemOp_movaps_Vps_Wps, iemOp_movapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9100 /* 0x29 */ iemOp_movaps_Wps_Vps, iemOp_movapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9101 /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi, iemOp_cvtpi2pd_Vpd_Qpi, iemOp_cvtsi2ss_Vss_Ey, iemOp_cvtsi2sd_Vsd_Ey,
9102 /* 0x2b */ iemOp_movntps_Mps_Vps, iemOp_movntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9103 /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps, iemOp_cvttpd2pi_Ppi_Wpd, iemOp_cvttss2si_Gy_Wss, iemOp_cvttsd2si_Gy_Wsd,
9104 /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps, iemOp_cvtpd2pi_Qpi_Wpd, iemOp_cvtss2si_Gy_Wss, iemOp_cvtsd2si_Gy_Wsd,
9105 /* 0x2e */ iemOp_ucomiss_Vss_Wss, iemOp_ucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9106 /* 0x2f */ iemOp_comiss_Vss_Wss, iemOp_comisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9107
9108 /* 0x30 */ IEMOP_X4(iemOp_wrmsr),
9109 /* 0x31 */ IEMOP_X4(iemOp_rdtsc),
9110 /* 0x32 */ IEMOP_X4(iemOp_rdmsr),
9111 /* 0x33 */ IEMOP_X4(iemOp_rdpmc),
9112 /* 0x34 */ IEMOP_X4(iemOp_sysenter),
9113 /* 0x35 */ IEMOP_X4(iemOp_sysexit),
9114 /* 0x36 */ IEMOP_X4(iemOp_Invalid),
9115 /* 0x37 */ IEMOP_X4(iemOp_getsec),
9116 /* 0x38 */ IEMOP_X4(iemOp_3byte_Esc_0f_38),
9117 /* 0x39 */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
9118 /* 0x3a */ IEMOP_X4(iemOp_3byte_Esc_0f_3a),
9119 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
9120 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
9121 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
9122 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
9123 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
9124
9125 /* 0x40 */ IEMOP_X4(iemOp_cmovo_Gv_Ev),
9126 /* 0x41 */ IEMOP_X4(iemOp_cmovno_Gv_Ev),
9127 /* 0x42 */ IEMOP_X4(iemOp_cmovc_Gv_Ev),
9128 /* 0x43 */ IEMOP_X4(iemOp_cmovnc_Gv_Ev),
9129 /* 0x44 */ IEMOP_X4(iemOp_cmove_Gv_Ev),
9130 /* 0x45 */ IEMOP_X4(iemOp_cmovne_Gv_Ev),
9131 /* 0x46 */ IEMOP_X4(iemOp_cmovbe_Gv_Ev),
9132 /* 0x47 */ IEMOP_X4(iemOp_cmovnbe_Gv_Ev),
9133 /* 0x48 */ IEMOP_X4(iemOp_cmovs_Gv_Ev),
9134 /* 0x49 */ IEMOP_X4(iemOp_cmovns_Gv_Ev),
9135 /* 0x4a */ IEMOP_X4(iemOp_cmovp_Gv_Ev),
9136 /* 0x4b */ IEMOP_X4(iemOp_cmovnp_Gv_Ev),
9137 /* 0x4c */ IEMOP_X4(iemOp_cmovl_Gv_Ev),
9138 /* 0x4d */ IEMOP_X4(iemOp_cmovnl_Gv_Ev),
9139 /* 0x4e */ IEMOP_X4(iemOp_cmovle_Gv_Ev),
9140 /* 0x4f */ IEMOP_X4(iemOp_cmovnle_Gv_Ev),
9141
9142 /* 0x50 */ iemOp_movmskps_Gy_Ups, iemOp_movmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9143 /* 0x51 */ iemOp_sqrtps_Vps_Wps, iemOp_sqrtpd_Vpd_Wpd, iemOp_sqrtss_Vss_Wss, iemOp_sqrtsd_Vsd_Wsd,
9144 /* 0x52 */ iemOp_rsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rsqrtss_Vss_Wss, iemOp_InvalidNeedRM,
9145 /* 0x53 */ iemOp_rcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rcpss_Vss_Wss, iemOp_InvalidNeedRM,
9146 /* 0x54 */ iemOp_andps_Vps_Wps, iemOp_andpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9147 /* 0x55 */ iemOp_andnps_Vps_Wps, iemOp_andnpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9148 /* 0x56 */ iemOp_orps_Vps_Wps, iemOp_orpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9149 /* 0x57 */ iemOp_xorps_Vps_Wps, iemOp_xorpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9150 /* 0x58 */ iemOp_addps_Vps_Wps, iemOp_addpd_Vpd_Wpd, iemOp_addss_Vss_Wss, iemOp_addsd_Vsd_Wsd,
9151 /* 0x59 */ iemOp_mulps_Vps_Wps, iemOp_mulpd_Vpd_Wpd, iemOp_mulss_Vss_Wss, iemOp_mulsd_Vsd_Wsd,
9152 /* 0x5a */ iemOp_cvtps2pd_Vpd_Wps, iemOp_cvtpd2ps_Vps_Wpd, iemOp_cvtss2sd_Vsd_Wss, iemOp_cvtsd2ss_Vss_Wsd,
9153 /* 0x5b */ iemOp_cvtdq2ps_Vps_Wdq, iemOp_cvtps2dq_Vdq_Wps, iemOp_cvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
9154 /* 0x5c */ iemOp_subps_Vps_Wps, iemOp_subpd_Vpd_Wpd, iemOp_subss_Vss_Wss, iemOp_subsd_Vsd_Wsd,
9155 /* 0x5d */ iemOp_minps_Vps_Wps, iemOp_minpd_Vpd_Wpd, iemOp_minss_Vss_Wss, iemOp_minsd_Vsd_Wsd,
9156 /* 0x5e */ iemOp_divps_Vps_Wps, iemOp_divpd_Vpd_Wpd, iemOp_divss_Vss_Wss, iemOp_divsd_Vsd_Wsd,
9157 /* 0x5f */ iemOp_maxps_Vps_Wps, iemOp_maxpd_Vpd_Wpd, iemOp_maxss_Vss_Wss, iemOp_maxsd_Vsd_Wsd,
9158
9159 /* 0x60 */ iemOp_punpcklbw_Pq_Qd, iemOp_punpcklbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9160 /* 0x61 */ iemOp_punpcklwd_Pq_Qd, iemOp_punpcklwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9161 /* 0x62 */ iemOp_punpckldq_Pq_Qd, iemOp_punpckldq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9162 /* 0x63 */ iemOp_packsswb_Pq_Qq, iemOp_packsswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9163 /* 0x64 */ iemOp_pcmpgtb_Pq_Qq, iemOp_pcmpgtb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9164 /* 0x65 */ iemOp_pcmpgtw_Pq_Qq, iemOp_pcmpgtw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9165 /* 0x66 */ iemOp_pcmpgtd_Pq_Qq, iemOp_pcmpgtd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9166 /* 0x67 */ iemOp_packuswb_Pq_Qq, iemOp_packuswb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9167 /* 0x68 */ iemOp_punpckhbw_Pq_Qd, iemOp_punpckhbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9168 /* 0x69 */ iemOp_punpckhwd_Pq_Qd, iemOp_punpckhwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9169 /* 0x6a */ iemOp_punpckhdq_Pq_Qd, iemOp_punpckhdq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9170 /* 0x6b */ iemOp_packssdw_Pq_Qd, iemOp_packssdw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9171 /* 0x6c */ iemOp_InvalidNeedRM, iemOp_punpcklqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9172 /* 0x6d */ iemOp_InvalidNeedRM, iemOp_punpckhqdq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9173 /* 0x6e */ iemOp_movd_q_Pd_Ey, iemOp_movd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9174 /* 0x6f */ iemOp_movq_Pq_Qq, iemOp_movdqa_Vx_Wx, iemOp_movdqu_Vx_Wx, iemOp_InvalidNeedRM,
9175
9176 /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib, iemOp_pshufd_Vx_Wx_Ib, iemOp_pshufhw_Vx_Wx_Ib, iemOp_pshuflw_Vx_Wx_Ib,
9177 /* 0x71 */ IEMOP_X4(iemOp_Grp12),
9178 /* 0x72 */ IEMOP_X4(iemOp_Grp13),
9179 /* 0x73 */ IEMOP_X4(iemOp_Grp14),
9180 /* 0x74 */ iemOp_pcmpeqb_Pq_Qq, iemOp_pcmpeqb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9181 /* 0x75 */ iemOp_pcmpeqw_Pq_Qq, iemOp_pcmpeqw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9182 /* 0x76 */ iemOp_pcmpeqd_Pq_Qq, iemOp_pcmpeqd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9183 /* 0x77 */ iemOp_emms, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9184
9185 /* 0x78 */ iemOp_vmread_Ey_Gy, iemOp_AmdGrp17, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9186 /* 0x79 */ iemOp_vmwrite_Gy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9187 /* 0x7a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9188 /* 0x7b */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9189 /* 0x7c */ iemOp_InvalidNeedRM, iemOp_haddpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_haddps_Vps_Wps,
9190 /* 0x7d */ iemOp_InvalidNeedRM, iemOp_hsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_hsubps_Vps_Wps,
9191 /* 0x7e */ iemOp_movd_q_Ey_Pd, iemOp_movd_q_Ey_Vy, iemOp_movq_Vq_Wq, iemOp_InvalidNeedRM,
9192 /* 0x7f */ iemOp_movq_Qq_Pq, iemOp_movdqa_Wx_Vx, iemOp_movdqu_Wx_Vx, iemOp_InvalidNeedRM,
9193
9194 /* 0x80 */ IEMOP_X4(iemOp_jo_Jv),
9195 /* 0x81 */ IEMOP_X4(iemOp_jno_Jv),
9196 /* 0x82 */ IEMOP_X4(iemOp_jc_Jv),
9197 /* 0x83 */ IEMOP_X4(iemOp_jnc_Jv),
9198 /* 0x84 */ IEMOP_X4(iemOp_je_Jv),
9199 /* 0x85 */ IEMOP_X4(iemOp_jne_Jv),
9200 /* 0x86 */ IEMOP_X4(iemOp_jbe_Jv),
9201 /* 0x87 */ IEMOP_X4(iemOp_jnbe_Jv),
9202 /* 0x88 */ IEMOP_X4(iemOp_js_Jv),
9203 /* 0x89 */ IEMOP_X4(iemOp_jns_Jv),
9204 /* 0x8a */ IEMOP_X4(iemOp_jp_Jv),
9205 /* 0x8b */ IEMOP_X4(iemOp_jnp_Jv),
9206 /* 0x8c */ IEMOP_X4(iemOp_jl_Jv),
9207 /* 0x8d */ IEMOP_X4(iemOp_jnl_Jv),
9208 /* 0x8e */ IEMOP_X4(iemOp_jle_Jv),
9209 /* 0x8f */ IEMOP_X4(iemOp_jnle_Jv),
9210
9211 /* 0x90 */ IEMOP_X4(iemOp_seto_Eb),
9212 /* 0x91 */ IEMOP_X4(iemOp_setno_Eb),
9213 /* 0x92 */ IEMOP_X4(iemOp_setc_Eb),
9214 /* 0x93 */ IEMOP_X4(iemOp_setnc_Eb),
9215 /* 0x94 */ IEMOP_X4(iemOp_sete_Eb),
9216 /* 0x95 */ IEMOP_X4(iemOp_setne_Eb),
9217 /* 0x96 */ IEMOP_X4(iemOp_setbe_Eb),
9218 /* 0x97 */ IEMOP_X4(iemOp_setnbe_Eb),
9219 /* 0x98 */ IEMOP_X4(iemOp_sets_Eb),
9220 /* 0x99 */ IEMOP_X4(iemOp_setns_Eb),
9221 /* 0x9a */ IEMOP_X4(iemOp_setp_Eb),
9222 /* 0x9b */ IEMOP_X4(iemOp_setnp_Eb),
9223 /* 0x9c */ IEMOP_X4(iemOp_setl_Eb),
9224 /* 0x9d */ IEMOP_X4(iemOp_setnl_Eb),
9225 /* 0x9e */ IEMOP_X4(iemOp_setle_Eb),
9226 /* 0x9f */ IEMOP_X4(iemOp_setnle_Eb),
9227
9228 /* 0xa0 */ IEMOP_X4(iemOp_push_fs),
9229 /* 0xa1 */ IEMOP_X4(iemOp_pop_fs),
9230 /* 0xa2 */ IEMOP_X4(iemOp_cpuid),
9231 /* 0xa3 */ IEMOP_X4(iemOp_bt_Ev_Gv),
9232 /* 0xa4 */ IEMOP_X4(iemOp_shld_Ev_Gv_Ib),
9233 /* 0xa5 */ IEMOP_X4(iemOp_shld_Ev_Gv_CL),
9234 /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
9235 /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
9236 /* 0xa8 */ IEMOP_X4(iemOp_push_gs),
9237 /* 0xa9 */ IEMOP_X4(iemOp_pop_gs),
9238 /* 0xaa */ IEMOP_X4(iemOp_rsm),
9239 /* 0xab */ IEMOP_X4(iemOp_bts_Ev_Gv),
9240 /* 0xac */ IEMOP_X4(iemOp_shrd_Ev_Gv_Ib),
9241 /* 0xad */ IEMOP_X4(iemOp_shrd_Ev_Gv_CL),
9242 /* 0xae */ IEMOP_X4(iemOp_Grp15),
9243 /* 0xaf */ IEMOP_X4(iemOp_imul_Gv_Ev),
9244
9245 /* 0xb0 */ IEMOP_X4(iemOp_cmpxchg_Eb_Gb),
9246 /* 0xb1 */ IEMOP_X4(iemOp_cmpxchg_Ev_Gv),
9247 /* 0xb2 */ IEMOP_X4(iemOp_lss_Gv_Mp),
9248 /* 0xb3 */ IEMOP_X4(iemOp_btr_Ev_Gv),
9249 /* 0xb4 */ IEMOP_X4(iemOp_lfs_Gv_Mp),
9250 /* 0xb5 */ IEMOP_X4(iemOp_lgs_Gv_Mp),
9251 /* 0xb6 */ IEMOP_X4(iemOp_movzx_Gv_Eb),
9252 /* 0xb7 */ IEMOP_X4(iemOp_movzx_Gv_Ew),
9253 /* 0xb8 */ iemOp_jmpe, iemOp_InvalidNeedRM, iemOp_popcnt_Gv_Ev, iemOp_InvalidNeedRM,
9254 /* 0xb9 */ IEMOP_X4(iemOp_Grp10),
9255 /* 0xba */ IEMOP_X4(iemOp_Grp8),
9256 /* 0xbb */ IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3?
9257 /* 0xbc */ iemOp_bsf_Gv_Ev, iemOp_bsf_Gv_Ev, iemOp_tzcnt_Gv_Ev, iemOp_bsf_Gv_Ev,
9258 /* 0xbd */ iemOp_bsr_Gv_Ev, iemOp_bsr_Gv_Ev, iemOp_lzcnt_Gv_Ev, iemOp_bsr_Gv_Ev,
9259 /* 0xbe */ IEMOP_X4(iemOp_movsx_Gv_Eb),
9260 /* 0xbf */ IEMOP_X4(iemOp_movsx_Gv_Ew),
9261
9262 /* 0xc0 */ IEMOP_X4(iemOp_xadd_Eb_Gb),
9263 /* 0xc1 */ IEMOP_X4(iemOp_xadd_Ev_Gv),
9264 /* 0xc2 */ iemOp_cmpps_Vps_Wps_Ib, iemOp_cmppd_Vpd_Wpd_Ib, iemOp_cmpss_Vss_Wss_Ib, iemOp_cmpsd_Vsd_Wsd_Ib,
9265 /* 0xc3 */ iemOp_movnti_My_Gy, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9266 /* 0xc4 */ iemOp_pinsrw_Pq_RyMw_Ib, iemOp_pinsrw_Vdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
9267 /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib, iemOp_pextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
9268 /* 0xc6 */ iemOp_shufps_Vps_Wps_Ib, iemOp_shufpd_Vpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
9269 /* 0xc7 */ IEMOP_X4(iemOp_Grp9),
9270 /* 0xc8 */ IEMOP_X4(iemOp_bswap_rAX_r8),
9271 /* 0xc9 */ IEMOP_X4(iemOp_bswap_rCX_r9),
9272 /* 0xca */ IEMOP_X4(iemOp_bswap_rDX_r10),
9273 /* 0xcb */ IEMOP_X4(iemOp_bswap_rBX_r11),
9274 /* 0xcc */ IEMOP_X4(iemOp_bswap_rSP_r12),
9275 /* 0xcd */ IEMOP_X4(iemOp_bswap_rBP_r13),
9276 /* 0xce */ IEMOP_X4(iemOp_bswap_rSI_r14),
9277 /* 0xcf */ IEMOP_X4(iemOp_bswap_rDI_r15),
9278
9279 /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_addsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_addsubps_Vps_Wps,
9280 /* 0xd1 */ iemOp_psrlw_Pq_Qq, iemOp_psrlw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9281 /* 0xd2 */ iemOp_psrld_Pq_Qq, iemOp_psrld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9282 /* 0xd3 */ iemOp_psrlq_Pq_Qq, iemOp_psrlq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9283 /* 0xd4 */ iemOp_paddq_Pq_Qq, iemOp_paddq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9284 /* 0xd5 */ iemOp_pmullw_Pq_Qq, iemOp_pmullw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9285 /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_movq_Wq_Vq, iemOp_movq2dq_Vdq_Nq, iemOp_movdq2q_Pq_Uq,
9286 /* 0xd7 */ iemOp_pmovmskb_Gd_Nq, iemOp_pmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9287 /* 0xd8 */ iemOp_psubusb_Pq_Qq, iemOp_psubusb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9288 /* 0xd9 */ iemOp_psubusw_Pq_Qq, iemOp_psubusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9289 /* 0xda */ iemOp_pminub_Pq_Qq, iemOp_pminub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9290 /* 0xdb */ iemOp_pand_Pq_Qq, iemOp_pand_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9291 /* 0xdc */ iemOp_paddusb_Pq_Qq, iemOp_paddusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9292 /* 0xdd */ iemOp_paddusw_Pq_Qq, iemOp_paddusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9293 /* 0xde */ iemOp_pmaxub_Pq_Qq, iemOp_pmaxub_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9294 /* 0xdf */ iemOp_pandn_Pq_Qq, iemOp_pandn_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9295
9296 /* 0xe0 */ iemOp_pavgb_Pq_Qq, iemOp_pavgb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9297 /* 0xe1 */ iemOp_psraw_Pq_Qq, iemOp_psraw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9298 /* 0xe2 */ iemOp_psrad_Pq_Qq, iemOp_psrad_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9299 /* 0xe3 */ iemOp_pavgw_Pq_Qq, iemOp_pavgw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9300 /* 0xe4 */ iemOp_pmulhuw_Pq_Qq, iemOp_pmulhuw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9301 /* 0xe5 */ iemOp_pmulhw_Pq_Qq, iemOp_pmulhw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9302 /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_cvttpd2dq_Vx_Wpd, iemOp_cvtdq2pd_Vx_Wpd, iemOp_cvtpd2dq_Vx_Wpd,
9303 /* 0xe7 */ iemOp_movntq_Mq_Pq, iemOp_movntdq_Mx_Vx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9304 /* 0xe8 */ iemOp_psubsb_Pq_Qq, iemOp_psubsb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9305 /* 0xe9 */ iemOp_psubsw_Pq_Qq, iemOp_psubsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9306 /* 0xea */ iemOp_pminsw_Pq_Qq, iemOp_pminsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9307 /* 0xeb */ iemOp_por_Pq_Qq, iemOp_por_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9308 /* 0xec */ iemOp_paddsb_Pq_Qq, iemOp_paddsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9309 /* 0xed */ iemOp_paddsw_Pq_Qq, iemOp_paddsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9310 /* 0xee */ iemOp_pmaxsw_Pq_Qq, iemOp_pmaxsw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9311 /* 0xef */ iemOp_pxor_Pq_Qq, iemOp_pxor_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9312
9313 /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_lddqu_Vx_Mx,
9314 /* 0xf1 */ iemOp_psllw_Pq_Qq, iemOp_psllw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9315 /* 0xf2 */ iemOp_pslld_Pq_Qq, iemOp_pslld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9316 /* 0xf3 */ iemOp_psllq_Pq_Qq, iemOp_psllq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9317 /* 0xf4 */ iemOp_pmuludq_Pq_Qq, iemOp_pmuludq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9318 /* 0xf5 */ iemOp_pmaddwd_Pq_Qq, iemOp_pmaddwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9319 /* 0xf6 */ iemOp_psadbw_Pq_Qq, iemOp_psadbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9320 /* 0xf7 */ iemOp_maskmovq_Pq_Nq, iemOp_maskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9321 /* 0xf8 */ iemOp_psubb_Pq_Qq, iemOp_psubb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9322 /* 0xf9 */ iemOp_psubw_Pq_Qq, iemOp_psubw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9323 /* 0xfa */ iemOp_psubd_Pq_Qq, iemOp_psubd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9324 /* 0xfb */ iemOp_psubq_Pq_Qq, iemOp_psubq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9325 /* 0xfc */ iemOp_paddb_Pq_Qq, iemOp_paddb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9326 /* 0xfd */ iemOp_paddw_Pq_Qq, iemOp_paddw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9327 /* 0xfe */ iemOp_paddd_Pq_Qq, iemOp_paddd_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
9328 /* 0xff */ IEMOP_X4(iemOp_ud0),
9329};
9330AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
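
/*
 * Illustrative sketch only: each row above holds four handlers, one per SIMD
 * prefix column (none, 066h, 0f3h, 0f2h), so the two-byte escape decoder can
 * index the table as opcode * 4 + prefix column.  Roughly (hypothetical
 * condensation of the real dispatch code):
 */
#if 0
FNIEMOP_DEF(iemOp_TwoByteEscapeSketch)
{
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    /* Prefix column: 0 = none, 1 = 0x66, 2 = 0xf3, 3 = 0xf2 (last prefix wins). */
    return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
}
#endif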
9331
9332/** @} */
9333