VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsTwoByte0f.cpp.h@ 96392

Last change on this file since 96392 was 96392, checked in by vboxsync, 3 years ago

VMM/IEM: Implement addsubps/addsubpd instructions, bugref:9898

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 394.4 KB
Line 
1/* $Id: IEMAllInstructionsTwoByte0f.cpp.h 96392 2022-08-22 08:01:49Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 *
5 * @remarks IEMAllInstructionsVexMap1.cpp.h is a VEX mirror of this file.
6 * Any update here is likely needed in that file too.
7 */
8
9/*
10 * Copyright (C) 2011-2022 Oracle Corporation
11 *
12 * This file is part of VirtualBox Open Source Edition (OSE), as
13 * available from http://www.215389.xyz. This file is free software;
14 * you can redistribute it and/or modify it under the terms of the GNU
15 * General Public License (GPL) as published by the Free Software
16 * Foundation, in version 2 as it comes in the "COPYING" file of the
17 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
18 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
19 */
20
21
22/** @name Two byte opcodes (first byte 0x0f).
23 *
24 * @{
25 */
26
27
/**
 * Common worker for MMX instructions on the form:
 *      pxxx    mm1, mm2/mem64
 *
 * The @a pfnU64 worker is invoked via IEM_MC_CALL_MMX_AIMPL_2 and thus also
 * gets FPU/MMX state in addition to the two operands; see
 * iemOpCommonMmxOpt_FullFull_To_Full for the operands-only variant.
 */
FNIEMOP_DEF_1(iemOpCommonMmx_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U64, pfnU64)
{
    /* The ModR/M byte selects between the reg,reg and reg,mem encodings. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        /* Destination is referenced read/write, source read-only. */
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        /* Effective address calculation must precede the decoding-done marker. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        /* Fetch the full 64-bit source operand into a local before dispatch. */
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
83
84
/**
 * Common worker for MMX instructions on the form:
 *      pxxx    mm1, mm2/mem64
 *
 * Unlike iemOpCommonMmx_FullFull_To_Full, the @a pfnU64 worker function takes
 * no FXSAVE state, just the operands (dispatched via IEM_MC_CALL_VOID_AIMPL_2
 * rather than IEM_MC_CALL_MMX_AIMPL_2).
 */
FNIEMOP_DEF_1(iemOpCommonMmxOpt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    /* The ModR/M byte selects between the reg,reg and reg,mem encodings. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        /* Fetch the full 64-bit source operand into a local before dispatch. */
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
143
144
/**
 * Common worker for MMX instructions on the form:
 *      pxxx    mm1, mm2/mem64
 * for instructions introduced with SSE.
 *
 * Identical to iemOpCommonMmx_FullFull_To_Full except that the exception
 * check also requires SSE or the AMD MMX extensions
 * (IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT).
 */
FNIEMOP_DEF_1(iemOpCommonMmxSse_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U64, pfnU64)
{
    /* The ModR/M byte selects between the reg,reg and reg,mem encodings. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
        /* Fetch the full 64-bit source operand into a local before dispatch. */
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
201
202
/**
 * Common worker for MMX instructions on the form:
 *      pxxx    mm1, mm2/mem64
 * for instructions introduced with SSE.
 *
 * Unlike iemOpCommonMmxSse_FullFull_To_Full, the @a pfnU64 worker function
 * takes no FXSAVE state, just the operands (dispatched via
 * IEM_MC_CALL_VOID_AIMPL_2).
 */
FNIEMOP_DEF_1(iemOpCommonMmxSseOpt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    /* The ModR/M byte selects between the reg,reg and reg,mem encodings. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
        /* Fetch the full 64-bit source operand into a local before dispatch. */
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
262
263
/**
 * Common worker for MMX instructions on the form:
 *      pxxx    mm1, mm2/mem64
 * that was introduced with SSE2.
 *
 * @param   pfnU64      The instruction's MMX worker (takes FXSAVE state plus
 *                      the two operands, see IEM_MC_CALL_MMX_AIMPL_2).
 * @param   fSupported  Whether the instruction is supported on the configured
 *                      CPU; fed into IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_EX
 *                      so unsupported instructions raise instead of executing.
 */
FNIEMOP_DEF_2(iemOpCommonMmx_FullFull_To_Full_Ex, PFNIEMAIMPLMEDIAF2U64, pfnU64, bool, fSupported)
{
    /* The ModR/M byte selects between the reg,reg and reg,mem encodings. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_EX(fSupported);
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_EX(fSupported);
        /* Fetch the full 64-bit source operand into a local before dispatch. */
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
320
321
/**
 * Common worker for SSE2 instructions on the forms:
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced
 * (IEM_MC_FETCH_MEM_U128_ALIGN_SSE in the memory form).
 * Exceptions type 4. SSE2 cpuid checks.
 *
 * The @a pfnU128 worker is invoked via IEM_MC_CALL_SSE_AIMPL_2, i.e. with
 * FXSAVE state in addition to the operands.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U128, pfnU128)
{
    /* The ModR/M byte selects between the reg,reg and reg,mem encodings. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        /* Destination XMM register is referenced read/write, source read-only. */
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        /* Alignment-checked 128-bit fetch into a local before dispatch. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
376
377
/**
 * Common worker for SSE2 instructions on the forms:
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SSE2 cpuid checks.
 *
 * Unlike iemOpCommonSse2_FullFull_To_Full, the @a pfnU128 worker function
 * takes no FXSAVE state, just the operands (dispatched via
 * IEM_MC_CALL_VOID_AIMPL_2).
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Opt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    /* The ModR/M byte selects between the reg,reg and reg,mem encodings. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        /* Alignment-checked 128-bit fetch into a local before dispatch. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
435
436
/**
 * Common worker for MMX instructions on the forms:
 *      pxxxx   mm1, mm2/mem32
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 32-bit memory access: the fetch below zero-extends a 32-bit read
 * into the 64-bit local (IEM_MC_FETCH_MEM_U32_ZX_U64).
 */
FNIEMOP_DEF_1(iemOpCommonMmx_LowLow_To_Full, FNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    /* The ModR/M byte selects between the reg,reg and reg,mem encodings. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_ARG(uint64_t const *, puSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);
        IEM_MC_FPU_TO_MMX_MODE();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        /* Only 32 bits are read from memory; zero-extended into the u64 local. */
        IEM_MC_FETCH_MEM_U32_ZX_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
493
494
/**
 * Common worker for SSE instructions on the forms:
 *      pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 128-bit aligned 64-bit or 128-bit memory access for SSE (the code
 * below reads the full, aligned 128 bits; see the todo).
 *
 * Exceptions type 4.
 *
 * NOTE(review): this worker uses IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE where
 * the HighHigh siblings use IEM_MC_PREPARE_SSE_USAGE — confirm the
 * difference is intentional.
 */
FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    /* The ModR/M byte selects between the reg,reg and reg,mem encodings. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the low qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually does and whether it will do a TLB load for the high
         *        part or skip any associated \#PF. Ditto for segmentation \#GPs. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
554
555
/**
 * Common worker for SSE2 instructions on the forms:
 *      pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 128-bit aligned 64-bit or 128-bit memory access for SSE (the code
 * below reads the full, aligned 128 bits; see the todo).
 *
 * Exceptions type 4.
 *
 * NOTE(review): this worker uses IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE where
 * the HighHigh siblings use IEM_MC_PREPARE_SSE_USAGE — confirm the
 * difference is intentional.
 */
FNIEMOP_DEF_1(iemOpCommonSse2_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    /* The ModR/M byte selects between the reg,reg and reg,mem encodings. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        /** @todo Most CPUs probably only read the low qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually does and whether it will do a TLB load for the high
         *        part or skip any associated \#PF. Ditto for segmentation \#GPs. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
615
616
/**
 * Common worker for MMX instructions on the form:
 *      pxxxx   mm1, mm2/mem64
 *
 * The 2nd operand is the second half of a register, which in the memory case
 * means a 64-bit memory access for MMX (the full qword is read; Intel
 * documents this as a full 64-bit read).
 */
FNIEMOP_DEF_1(iemOpCommonMmx_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    /* The ModR/M byte selects between the reg,reg and reg,mem encodings. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_ARG(uint64_t const *, puSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);
        IEM_MC_FPU_TO_MMX_MODE();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* intel docs this to be full 64-bit read */

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
675
676
/**
 * Common worker for SSE instructions on the form:
 *      pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the second half of a register, which for SSE is a
 * 128-bit aligned access where the hardware may read the full 128 bits or
 * only the upper 64 bits (this implementation reads all 128; see the todo).
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    /* The ModR/M byte selects between the reg,reg and reg,mem encodings. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the high qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually does and whether it will do a TLB load for the lower
         *        part or skip any associated \#PF. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
736
737
/**
 * Common worker for SSE floating-point instructions on the forms:
 *      pxxs    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE cpuid checks.
 *
 * The worker produces an IEMSSERESULT (result value plus MXCSR); the result
 * is committed via IEM_MC_STORE_SSE_RESULT and pending SIMD FP exceptions
 * are evaluated afterwards by IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSseFp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    /* The ModR/M byte selects between the reg,reg and reg,mem encodings. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        /* Both sources are read-only; the result goes through SseRes. */
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /* Alignment-checked 128-bit fetch of the 2nd source operand. */
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
801
802
/**
 * Common worker for SSE floating-point instructions on the forms:
 *      pxxs    xmm1, xmm2/mem32
 *
 * The 2nd operand is a single-precision scalar: the low R32 of the source
 * XMM register, or a plain 32-bit memory read (no alignment restriction
 * beyond the normal one for a 32-bit access).
 * Exceptions type 2 (original header said "type 3" semantics apply to
 * scalars — NOTE(review): confirm the exception type). SSE cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSseFp_FullR32_To_Full, PFNIEMAIMPLFPSSEF2U128R32, pfnU128_R32)
{
    /* The ModR/M byte selects between the reg,reg and reg,mem encodings. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCRTFLOAT32U, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        /* Only the low single of the source register is referenced. */
        IEM_MC_REF_XREG_R32_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R32, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(RTFLOAT32U, r32Src2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Src2, r32Src2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /* 32-bit scalar read; no 16-byte alignment enforcement needed. */
        IEM_MC_FETCH_MEM_R32(r32Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R32, pSseRes, pSrc1, pr32Src2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
866
867
/**
 * Common worker for SSE2 floating-point instructions on the forms:
 *      pxxd    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE2 cpuid checks.
 *
 * Same structure as iemOpCommonSseFp_FullFull_To_Full, but with the SSE2
 * cpuid/exception check (IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT).
 *
 * @sa iemOpCommonSseFp_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Fp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    /* The ModR/M byte selects between the reg,reg and reg,mem encodings. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        /* Alignment-checked 128-bit fetch of the 2nd source operand. */
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
931
932
/**
 * Common worker for SSE2 scalar instructions on the forms:
 *      pxxs       xmm1, xmm2/mem64
 *
 * The memory form fetches only 64 bits (IEM_MC_FETCH_MEM_R64), i.e. the
 * scalar double source; the register form references the source XMM register
 * directly.  NOTE(review): header previously claimed 128-bit alignment
 * enforcement / exceptions type 2, but no ALIGN fetch is used here — scalar
 * (type 3) semantics look intended; confirm against the SDM.
 * SSE2 cpuid checks.
 *
 * @sa  iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Fp_FullR64_To_Full, PFNIEMAIMPLFPSSEF2U128R64, pfnU128_R64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCRTFLOAT64U, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_R64_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R64, pSseRes, pSrc1, pSrc2);
        /* Result is written back first, then pending SIMD FP exceptions are raised. */
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(RTFLOAT64U, r64Src2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Src2, r64Src2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        /* Effective address must be calculated before decoding is declared done. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_R64(r64Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R64, pSseRes, pSrc1, pr64Src2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
996
997
/**
 * Common worker for SSE2 instructions on the form:
 *      pxxxx xmm1, xmm2/mem128
 *
 * The 2nd operand is the second half of a register, which for SSE a 128-bit
 * aligned access where it may read the full 128 bits or only the upper 64 bits.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse2_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        /** @todo Most CPUs probably only read the high qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually does and whether it will do a TLB load for the lower
         *        part or skip any associated \#PF. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1057
1058
/**
 * Common worker for SSE3 instructions on the forms:
 *      hxxx xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced
 * (IEM_MC_FETCH_MEM_XMM_ALIGN_SSE in the memory form).
 * Exceptions type 2. SSE3 cpuid checks.
 *
 * @sa  iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse3Fp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        /* Store the result, then raise any pending SIMD FP exceptions. */
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1122
1123
/** Opcode 0x0f 0x00 /0 - SLDT: store the LDTR selector to a register or memory.
 *  Invalid in real and V86 mode (IEMOP_HLP_NO_REAL_OR_V86_MODE). */
FNIEMOPRM_DEF(iemOp_Grp6_sldt)
{
    IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register destination: operand size is honoured by the C implementation. */
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_sldt_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint16_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sldt_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}
1148
1149
/** Opcode 0x0f 0x00 /1 - STR: store the task register selector to a register
 *  or memory.  Invalid in real and V86 mode. */
FNIEMOPRM_DEF(iemOp_Grp6_str)
{
    IEMOP_MNEMONIC(str, "str Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();


    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register destination: operand size is honoured by the C implementation. */
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_str_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint16_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_str_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}
1175
1176
/** Opcode 0x0f 0x00 /2 - LLDT: load the LDTR from a 16-bit selector in a
 *  register or memory.  Invalid in real and V86 mode; privilege checking is
 *  done in iemCImpl_lldt (memory form raises \#GP(0) for CPL != 0 first). */
FNIEMOPRM_DEF(iemOp_Grp6_lldt)
{
    IEMOP_MNEMONIC(lldt, "lldt Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1207
1208
/** Opcode 0x0f 0x00 /3 - LTR: load the task register from a 16-bit selector
 *  in a register or memory.  Invalid in real and V86 mode; the memory form
 *  raises \#GP(0) for CPL != 0 before fetching the selector. */
FNIEMOPRM_DEF(iemOp_Grp6_ltr)
{
    IEMOP_MNEMONIC(ltr, "ltr Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1239
1240
1241/** Opcode 0x0f 0x00 /3. */
1242FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
1243{
1244 IEMOP_HLP_MIN_286();
1245 IEMOP_HLP_NO_REAL_OR_V86_MODE();
1246
1247 if (IEM_IS_MODRM_REG_MODE(bRm))
1248 {
1249 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1250 IEM_MC_BEGIN(2, 0);
1251 IEM_MC_ARG(uint16_t, u16Sel, 0);
1252 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
1253 IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
1254 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
1255 IEM_MC_END();
1256 }
1257 else
1258 {
1259 IEM_MC_BEGIN(2, 1);
1260 IEM_MC_ARG(uint16_t, u16Sel, 0);
1261 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
1262 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1263 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1264 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1265 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1266 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
1267 IEM_MC_END();
1268 }
1269 return VINF_SUCCESS;
1270}
1271
1272
/** Opcode 0x0f 0x00 /4 - VERR: verify a segment for reading (fWrite=false). */
FNIEMOPRM_DEF(iemOp_Grp6_verr)
{
    IEMOP_MNEMONIC(verr, "verr Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
}
1280
1281
/** Opcode 0x0f 0x00 /5 - VERW: verify a segment for writing (fWrite=true). */
FNIEMOPRM_DEF(iemOp_Grp6_verw)
{
    IEMOP_MNEMONIC(verw, "verw Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
}
1289
1290
/**
 * Group 6 jump table, indexed by the ModR/M reg field (/0../7).
 * Slots /6 and /7 are undefined and raise \#UD via iemOp_InvalidWithRM.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
{
    iemOp_Grp6_sldt,
    iemOp_Grp6_str,
    iemOp_Grp6_lldt,
    iemOp_Grp6_ltr,
    iemOp_Grp6_verr,
    iemOp_Grp6_verw,
    iemOp_InvalidWithRM,
    iemOp_InvalidWithRM
};
1305
/** Opcode 0x0f 0x00 - dispatches to the Group 6 handler via the reg field. */
FNIEMOP_DEF(iemOp_Grp6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    return FNIEMOP_CALL_1(g_apfnGroup6[IEM_GET_MODRM_REG_8(bRm)], bRm);
}
1312
1313
/** Opcode 0x0f 0x01 /0 - SGDT: store the GDTR to memory (register forms of /0
 *  are routed to vmcall/vmlaunch/vmresume/vmxoff by iemOp_Grp7). */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sgdt, "sgdt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    /* NOTE(review): declares (2 args, 1 local) but no IEM_MC_LOCAL follows;
       iemOp_Grp7_sidt has the same shape - confirm the local count is benign. */
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
1330
1331
/** Opcode 0x0f 0x01 /0, ModR/M rm=1 (0xc1) - VMCALL. */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    IEMOP_MNEMONIC(vmcall, "vmcall");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the VMX instructions. ASSUMING no lock for now. */

    /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
       want all hypercalls regardless of instruction used, and if a
       hypercall isn't handled by GIM or HMSvm will raise an #UD.
       (NEM/win makes ASSUMPTIONS about this behavior.) */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmcall);
}
1344
1345
/** Opcode 0x0f 0x01 /0, ModR/M rm=2 (0xc2) - VMLAUNCH.
 *  Only implemented when nested VMX hardware virtualization is compiled in;
 *  otherwise a stub raising \#UD. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_MNEMONIC(vmlaunch, "vmlaunch");
    IEMOP_HLP_IN_VMX_OPERATION("vmlaunch", kVmxVDiag_Vmentry);
    IEMOP_HLP_VMX_INSTR("vmlaunch", kVmxVDiag_Vmentry);
    IEMOP_HLP_DONE_DECODING();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmlaunch);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
#endif
1363
1364
/** Opcode 0x0f 0x01 /0, ModR/M rm=3 (0xc3) - VMRESUME.
 *  Only implemented when nested VMX hardware virtualization is compiled in;
 *  otherwise a stub raising \#UD. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_MNEMONIC(vmresume, "vmresume");
    IEMOP_HLP_IN_VMX_OPERATION("vmresume", kVmxVDiag_Vmentry);
    IEMOP_HLP_VMX_INSTR("vmresume", kVmxVDiag_Vmentry);
    IEMOP_HLP_DONE_DECODING();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmresume);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
#endif
1382
1383
/** Opcode 0x0f 0x01 /0, ModR/M rm=4 (0xc4) - VMXOFF.
 *  Only implemented when nested VMX hardware virtualization is compiled in;
 *  otherwise a stub raising \#UD. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_MNEMONIC(vmxoff, "vmxoff");
    IEMOP_HLP_IN_VMX_OPERATION("vmxoff", kVmxVDiag_Vmxoff);
    IEMOP_HLP_VMX_INSTR("vmxoff", kVmxVDiag_Vmxoff);
    IEMOP_HLP_DONE_DECODING();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmxoff);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
#endif
1401
1402
/** Opcode 0x0f 0x01 /1 - SIDT: store the IDTR to memory (register forms of /1
 *  are routed to monitor/mwait by iemOp_Grp7). */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sidt, "sidt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
1419
1420
/** Opcode 0x0f 0x01 /1, ModR/M rm=0 (0xc8) - MONITOR.  The effective segment
 *  is passed along since MONITOR's address operand honours segment prefixes. */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    IEMOP_MNEMONIC(monitor, "monitor");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
}
1428
1429
/** Opcode 0x0f 0x01 /1, ModR/M rm=1 (0xc9) - MWAIT. */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
}
1437
1438
/** Opcode 0x0f 0x01 /2 - LGDT: load the GDTR from memory.  The effective
 *  operand size is forwarded so the C implementation can pick the base width. */
FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lgdt, "lgdt");
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}
1455
1456
/** Opcode 0x0f 0x01 0xd0 - XGETBV.  \#UD unless the guest CPU exposes
 *  XSAVE/XRSTOR. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    IEMOP_MNEMONIC(xgetbv, "xgetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        /** @todo r=ramshankar: We should use
         *        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX and
         *        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES here. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xgetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}
1471
1472
/** Opcode 0x0f 0x01 0xd1 - XSETBV.  \#UD unless the guest CPU exposes
 *  XSAVE/XRSTOR. */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    IEMOP_MNEMONIC(xsetbv, "xsetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        /** @todo r=ramshankar: We should use
         *        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX and
         *        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES here. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xsetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}
1487
1488
/** Opcode 0x0f 0x01 /3 - LIDT: load the IDTR from memory.  In 64-bit mode the
 *  effective operand size is forced to 64-bit regardless of prefixes. */
FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lidt, "lidt");
    IEMMODE enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
                         ? IEMMODE_64BIT
                         : pVCpu->iem.s.enmEffOpSize;
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}
1507
1508
/** Opcode 0x0f 0x01 0xd8 - VMRUN (AMD SVM).  Stubbed to \#UD unless nested
 *  SVM hardware virtualization is compiled in. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmrun)
{
    IEMOP_MNEMONIC(vmrun, "vmrun");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmrun);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);
#endif
1520
/** Opcode 0x0f 0x01 0xd9 - VMMCALL (AMD SVM hypercall). */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmmcall)
{
    IEMOP_MNEMONIC(vmmcall, "vmmcall");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */

    /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
       want all hypercalls regardless of instruction used, and if a
       hypercall isn't handled by GIM or HMSvm will raise an #UD.
       (NEM/win makes ASSUMPTIONS about this behavior.) */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmmcall);
}
1533
/** Opcode 0x0f 0x01 0xda - VMLOAD (AMD SVM).  Stubbed to \#UD unless nested
 *  SVM hardware virtualization is compiled in. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmload)
{
    IEMOP_MNEMONIC(vmload, "vmload");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmload);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);
#endif
1545
1546
/** Opcode 0x0f 0x01 0xdb - VMSAVE (AMD SVM).  Stubbed to \#UD unless nested
 *  SVM hardware virtualization is compiled in. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmsave)
{
    IEMOP_MNEMONIC(vmsave, "vmsave");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmsave);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);
#endif
1558
1559
/** Opcode 0x0f 0x01 0xdc - STGI (AMD SVM).  Stubbed to \#UD unless nested
 *  SVM hardware virtualization is compiled in. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_stgi)
{
    IEMOP_MNEMONIC(stgi, "stgi");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stgi);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);
#endif
1571
1572
/** Opcode 0x0f 0x01 0xdd - CLGI (AMD SVM).  Stubbed to \#UD unless nested
 *  SVM hardware virtualization is compiled in. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_clgi)
{
    IEMOP_MNEMONIC(clgi, "clgi");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clgi);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);
#endif
1584
1585
/** Opcode 0x0f 0x01 0xdf - INVLPGA (AMD SVM).  Stubbed to \#UD unless nested
 *  SVM hardware virtualization is compiled in. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_invlpga)
{
    IEMOP_MNEMONIC(invlpga, "invlpga");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_invlpga);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
#endif
1597
1598
/** Opcode 0x0f 0x01 0xde - SKINIT (AMD SVM).  Stubbed to \#UD unless nested
 *  SVM hardware virtualization is compiled in. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_skinit)
{
    IEMOP_MNEMONIC(skinit, "skinit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_skinit);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);
#endif
1610
1611
/** Opcode 0x0f 0x01 /4 - SMSW: store the machine status word (CR0 low bits)
 *  to a register or memory. */
FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(smsw, "smsw");
    IEMOP_HLP_MIN_286();
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register destination: operand size is honoured by the C implementation. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_smsw_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint16_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_smsw_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}
1634
1635
/** Opcode 0x0f 0x01 /6 - LMSW: load the machine status word.
 *  The register form passes NIL_RTGCPTR as the (unused) effective address so
 *  iemCImpl_lmsw has a uniform signature for both forms. */
FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
{
    /* The operand size is effectively ignored, all is 16-bit and only the
       lower 3-bits are used. */
    IEMOP_MNEMONIC(lmsw, "lmsw");
    IEMOP_HLP_MIN_286();
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_ARG_CONST(RTGCPTR, GCPtrEffDst, NIL_RTGCPTR, 1);
        IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_2(iemCImpl_lmsw, u16Tmp, GCPtrEffDst);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_2(iemCImpl_lmsw, u16Tmp, GCPtrEffDst);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1666
1667
/** Opcode 0x0f 0x01 /7 - INVLPG: invalidate the TLB entry for the given
 *  linear address (memory form only; register forms are swapgs/rdtscp). */
FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
{
    IEMOP_MNEMONIC(invlpg, "invlpg");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(1, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}
1681
1682
/** Opcode 0x0f 0x01 /7, ModR/M rm=0 (0xf8) - SWAPGS (64-bit mode only). */
FNIEMOP_DEF(iemOp_Grp7_swapgs)
{
    IEMOP_MNEMONIC(swapgs, "swapgs");
    IEMOP_HLP_ONLY_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
}
1691
1692
/** Opcode 0x0f 0x01 /7, ModR/M rm=1 (0xf9) - RDTSCP. */
FNIEMOP_DEF(iemOp_Grp7_rdtscp)
{
    IEMOP_MNEMONIC(rdtscp, "rdtscp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtscp);
}
1700
1701
/**
 * Group 7 jump table, memory variant (ModR/M mod != 3), indexed by the reg
 * field.  Slot /5 is undefined; register forms are dispatched in iemOp_Grp7.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup7Mem[8] =
{
    iemOp_Grp7_sgdt,
    iemOp_Grp7_sidt,
    iemOp_Grp7_lgdt,
    iemOp_Grp7_lidt,
    iemOp_Grp7_smsw,
    iemOp_InvalidWithRM,
    iemOp_Grp7_lmsw,
    iemOp_Grp7_invlpg
};
1716
1717
/** Opcode 0x0f 0x01 - Group 7 dispatcher.  Memory forms go through
 *  g_apfnGroup7Mem; register forms select special instructions by the
 *  reg+rm fields (VMX, MONITOR/MWAIT, XGETBV/XSETBV, SVM, SWAPGS/RDTSCP). */
FNIEMOP_DEF(iemOp_Grp7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
        return FNIEMOP_CALL_1(g_apfnGroup7Mem[IEM_GET_MODRM_REG_8(bRm)], bRm);

    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: /* VMX instruction group (0xc1..0xc4). */
            switch (IEM_GET_MODRM_RM_8(bRm))
            {
                case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 1: /* MONITOR/MWAIT (0xc8/0xc9). */
            switch (IEM_GET_MODRM_RM_8(bRm))
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 2: /* XGETBV/XSETBV (0xd0/0xd1). */
            switch (IEM_GET_MODRM_RM_8(bRm))
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 3: /* AMD SVM instruction group (0xd8..0xdf) - all rm values taken. */
            switch (IEM_GET_MODRM_RM_8(bRm))
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
                case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
                case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
                case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }

        case 4: /* SMSW has a valid register form. */
            return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);

        case 5:
            return IEMOP_RAISE_INVALID_OPCODE();

        case 6: /* LMSW has a valid register form. */
            return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);

        case 7: /* SWAPGS/RDTSCP (0xf8/0xf9). */
            switch (IEM_GET_MODRM_RM_8(bRm))
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
1787
/** Common worker for LAR (opcode 0x0f 0x02) and LSL (opcode 0x0f 0x03).
 *  (Header previously said "Opcode 0x0f 0x00 /3", which is LTR - the callers
 *  below are iemOp_lar_Gv_Ew and iemOp_lsl_Gv_Ew.)
 *  16-bit operand size uses the 16-bit destination path; 32-bit and 64-bit
 *  share the 64-bit path.  The source selector is always a 16-bit read. */
FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
{
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
/** @todo testcase: make sure it's a 16-bit read. */

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
1881
1882
1883
/** Opcode 0x0f 0x02 - LAR: load access rights byte (fIsLar=true). */
FNIEMOP_DEF(iemOp_lar_Gv_Ew)
{
    IEMOP_MNEMONIC(lar, "lar Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
}
1890
1891
/** Opcode 0x0f 0x03 - LSL: load segment limit (fIsLar=false). */
FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
{
    IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
}
1898
1899
/** Opcode 0x0f 0x05 - SYSCALL. */
FNIEMOP_DEF(iemOp_syscall)
{
    IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL   */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
}
1907
1908
/** Opcode 0x0f 0x06 - CLTS: clear the task-switched flag in CR0. */
FNIEMOP_DEF(iemOp_clts)
{
    IEMOP_MNEMONIC(clts, "clts");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
}
1916
1917
/** Opcode 0x0f 0x07 - SYSRET. */
FNIEMOP_DEF(iemOp_sysret)
{
    IEMOP_MNEMONIC(sysret, "sysret");  /** @todo 386 LOADALL   */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
}
1925
1926
/** Opcode 0x0f 0x08 - INVD: invalidate internal caches (privileged). */
FNIEMOP_DEF(iemOp_invd)
{
    IEMOP_MNEMONIC0(FIXED, INVD, invd, DISOPTYPE_PRIVILEGED, 0);
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_invd);
}
1935
1936
/** Opcode 0x0f 0x09 - WBINVD: write back and invalidate caches (privileged). */
FNIEMOP_DEF(iemOp_wbinvd)
{
    IEMOP_MNEMONIC0(FIXED, WBINVD, wbinvd, DISOPTYPE_PRIVILEGED, 0);
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wbinvd);
}
1945
1946
/** Opcode 0x0f 0x0b - UD2: guaranteed invalid opcode, raises \#UD. */
FNIEMOP_DEF(iemOp_ud2)
{
    IEMOP_MNEMONIC(ud2, "ud2");
    return IEMOP_RAISE_INVALID_OPCODE();
}
1953
/** Opcode 0x0f 0x0d - AMD 3DNow! PREFETCH/PREFETCHW group.
 *  \#UD without the 3DNowPrefetch feature or with a register operand;
 *  otherwise executed as a NOP after the effective-address calculation
 *  (which still applies segmentation). */
FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
{
    /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
    {
        IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 2: /* Aliased to /0 for the time being. */
        case 4: /* Aliased to /0 for the time being. */
        case 5: /* Aliased to /0 for the time being. */
        case 6: /* Aliased to /0 for the time being. */
        case 7: /* Aliased to /0 for the time being. */
        case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
        case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
        case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Currently a NOP. */
    NOREF(GCPtrEffSrc);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
1994
1995
/** Opcode 0x0f 0x0e - FEMMS (AMD): fast exit from MMX state back to FPU mode. */
FNIEMOP_DEF(iemOp_femms)
{
    IEMOP_MNEMONIC(femms, "femms");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_FROM_MMX_MODE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2011
2012
/** Opcode 0x0f 0x0f. */
FNIEMOP_DEF(iemOp_3Dnow)
{
    /* 3DNow! escape: the real opcode byte is the instruction's trailing
       immediate byte, dispatched separately. */
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
    {
        IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

#ifdef IEM_WITH_3DNOW
    /* This is pretty sparse, use switch instead of table. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL_1(iemOp_3DNowDispatcher, b);
#else
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif
}
2031
2032
/**
 * @opcode 0x10
 * @oppfx none
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_movups_Vps_Wps)
{
    /* MOVUPS xmm, xmm/m128 - unaligned 128-bit load/copy (no alignment check
       on the memory form, hence the plain FETCH_MEM_U128 below). */
    IEMOP_MNEMONIC2(RM, MOVUPS, movups, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
                              IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        /* Effective address (incl. displacement bytes) is decoded before the
           lock-prefix check. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;

}
2083
2084
/**
 * @opcode 0x10
 * @oppfx 0x66
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movupd_Vpd_Wpd)
{
    /* MOVUPD xmm, xmm/m128 - unaligned 128-bit load/copy (SSE2). */
    IEMOP_MNEMONIC2(RM, MOVUPD, movupd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
                              IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        /* Unaligned access is fine for MOVUPD, so no ALIGN_SSE fetch here. */
        IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2134
2135
/**
 * @opcode 0x10
 * @oppfx 0xf3
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_movss_Vss_Wss)
{
    /* MOVSS xmm, xmm/m32.  Register form merges into the low dword only;
       memory form zero-extends the dword to the whole 128-bit register
       (see VssZx_WO and the ZX store below). */
    IEMOP_MNEMONIC2(RM, MOVSS, movss, VssZx_WO, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint32_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        /* Only the low dword is written; bits 127:32 are preserved. */
        IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        /* Memory form zero-extends into bits 127:32. */
        IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2188
2189
/**
 * @opcode 0x10
 * @oppfx 0xf2
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movsd_Vsd_Wsd)
{
    /* MOVSD xmm, xmm/m64.  Register form writes the low qword only;
       memory form zero-extends the qword to 128 bits (VsdZx_WO). */
    IEMOP_MNEMONIC2(RM, MOVSD, movsd, VsdZx_WO, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        /* Only the low qword is written; bits 127:64 are preserved. */
        IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        /* Memory form zero-extends into bits 127:64. */
        IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2242
2243
/**
 * @opcode 0x11
 * @oppfx none
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movups_Wps_Vps)
{
    /* MOVUPS xmm/m128, xmm - store form of 0x0f 0x10 (operands reversed). */
    IEMOP_MNEMONIC2(MR, MOVUPS, movups, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        /* MR form: rm is the destination, reg the source. */
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
                              IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /* Store form only reads the XMM state. */
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2293
2294
/**
 * @opcode 0x11
 * @oppfx 0x66
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movupd_Wpd_Vpd)
{
    /* MOVUPD xmm/m128, xmm - store form of 66 0x0f 0x10 (SSE2). */
    IEMOP_MNEMONIC2(MR, MOVUPD, movupd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        /* MR form: rm is the destination, reg the source. */
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
                              IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        /* Store form only reads the XMM state. */
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2344
2345
/**
 * @opcode 0x11
 * @oppfx 0xf3
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_movss_Wss_Vss)
{
    /* MOVSS xmm/m32, xmm - store form.  Register destination keeps its
       upper 96 bits; memory destination is a plain 32-bit store. */
    IEMOP_MNEMONIC2(MR, MOVSS, movss, Wss_WO, Vss, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint32_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        /* Only the low dword of the destination register is written. */
        IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2398
2399
/**
 * @opcode 0x11
 * @oppfx 0xf2
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movsd_Wsd_Vsd)
{
    /* MOVSD xmm/m64, xmm - store form.  Register destination keeps its
       upper 64 bits; memory destination is a plain 64-bit store. */
    IEMOP_MNEMONIC2(MR, MOVSD, movsd, Wsd_WO, Vsd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        /* Only the low qword of the destination register is written. */
        IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2452
2453
/* 0x0f 0x12: two instructions share the opcode - MOVHLPS for the register
   form and MOVLPS for the memory form.  Both write only the low qword of
   the destination XMM register. */
FNIEMOP_DEF(iemOp_movlps_Vq_Mq__movhlps)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /**
         * @opcode 0x12
         * @opcodesub 11 mr/reg
         * @oppfx none
         * @opcpuid sse
         * @opgroup og_sse_simdfp_datamove
         * @opxcpttype 5
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(RM_REG, MOVHLPS, movhlps, Vq_WO, UqHi, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);

        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        /* MOVHLPS: high qword of source -> low qword of destination. */
        IEM_MC_FETCH_XREG_HI_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode 0x12
         * @opcodesub !11 mr/reg
         * @oppfx none
         * @opcpuid sse
         * @opgroup og_sse_simdfp_datamove
         * @opxcpttype 5
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-42 -> op1=-42
         * @opfunction iemOp_movlps_Vq_Mq__vmovhlps
         */
        /* NOTE(review): the register form above uses Vq_WO while this one
           uses plain Vq - confirm whether the _WO marker was intended here
           as well (cf. iemOp_movlpd_Vq_Mq). */
        IEMOP_MNEMONIC2(RM_MEM, MOVLPS, movlps, Vq, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        /* MOVLPS: m64 -> low qword of destination, high qword preserved. */
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2516
2517
/**
 * @opcode 0x12
 * @opcodesub !11 mr/reg
 * @oppfx 0x66
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movlpd_Vq_Mq)
{
    /* MOVLPD xmm, m64 - load m64 into the low qword, preserving the high
       qword.  The register form of this encoding is invalid (#UD). */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        IEMOP_MNEMONIC2(RM_MEM, MOVLPD, movlpd, Vq_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic ud660f12m3
     * @opcode 0x12
     * @opcodesub 11 mr/reg
     * @oppfx 0x66
     * @opunused immediate
     * @opcpuid sse
     * @optest ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}
2564
2565
/**
 * @opcode 0x12
 * @oppfx 0xf3
 * @opcpuid sse3
 * @opgroup og_sse3_pcksclr_datamove
 * @opxcpttype 4
 * @optest op1=-1 op2=0xdddddddd00000002eeeeeeee00000001 ->
 * op1=0x00000002000000020000000100000001
 */
FNIEMOP_DEF(iemOp_movsldup_Vdq_Wdq)
{
    /* MOVSLDUP (SSE3) - duplicate the even-indexed dwords of the source
       (see @optest above); work done by the iemAImpl_movsldup assembly/C
       helper. */
    IEMOP_MNEMONIC2(RM, MOVSLDUP, movsldup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);

        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        /* Alignment-checked fetch: misaligned m128 raises #GP (xcpttype 4). */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2624
2625
/**
 * @opcode 0x12
 * @oppfx 0xf2
 * @opcpuid sse3
 * @opgroup og_sse3_pcksclr_datamove
 * @opxcpttype 5
 * @optest op1=-1 op2=0xddddddddeeeeeeee2222222211111111 ->
 * op1=0x22222222111111112222222211111111
 */
FNIEMOP_DEF(iemOp_movddup_Vdq_Wdq)
{
    /* MOVDDUP (SSE3) - duplicate the low qword of the source into both
       qwords of the destination; only 64 bits are read (hence the U64
       fetches and xcpttype 5 - no alignment check). */
    IEMOP_MNEMONIC2(RM, MOVDDUP, movddup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(uint64_t, uSrc, 1);

        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movddup, puDst, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(uint64_t, uSrc, 1);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movddup, puDst, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2683
2684
/**
 * @opcode 0x13
 * @opcodesub !11 mr/reg
 * @oppfx none
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movlps_Mq_Vq)
{
    /* MOVLPS m64, xmm - store the low qword of the XMM register.
       The register form of this encoding is invalid (#UD). */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        IEMOP_MNEMONIC2(MR_MEM, MOVLPS, movlps, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic ud0f13m3
     * @opcode 0x13
     * @opcodesub 11 mr/reg
     * @oppfx none
     * @opunused immediate
     * @opcpuid sse
     * @optest ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}
2731
2732
/**
 * @opcode 0x13
 * @opcodesub !11 mr/reg
 * @oppfx 0x66
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movlpd_Mq_Vq)
{
    /* MOVLPD m64, xmm - store the low qword of the XMM register (SSE2).
       The register form of this encoding is invalid (#UD). */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        IEMOP_MNEMONIC2(MR_MEM, MOVLPD, movlpd, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic ud660f13m3
     * @opcode 0x13
     * @opcodesub 11 mr/reg
     * @oppfx 0x66
     * @opunused immediate
     * @opcpuid sse
     * @optest ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}
2778
2779
2780/**
2781 * @opmnemonic udf30f13
2782 * @opcode 0x13
2783 * @oppfx 0xf3
2784 * @opunused intel-modrm
2785 * @opcpuid sse
2786 * @optest ->
2787 * @opdone
2788 */
2789
2790/**
2791 * @opmnemonic udf20f13
2792 * @opcode 0x13
2793 * @oppfx 0xf2
2794 * @opunused intel-modrm
2795 * @opcpuid sse
2796 * @optest ->
2797 * @opdone
2798 */
2799
/** Opcode 0x0f 0x14 - unpcklps Vx, Wx*/
FNIEMOP_DEF(iemOp_unpcklps_Vx_Wx)
{
    /* UNPCKLPS - interleave low packed singles; decoding and exception
       handling are delegated to the common SSE low-low worker. */
    IEMOP_MNEMONIC2(RM, UNPCKLPS, unpcklps, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, iemAImpl_unpcklps_u128);
}
2806
2807
/** Opcode 0x66 0x0f 0x14 - unpcklpd Vx, Wx */
FNIEMOP_DEF(iemOp_unpcklpd_Vx_Wx)
{
    /* UNPCKLPD - interleave low packed doubles; delegated to the common
       SSE2 low-low worker. */
    IEMOP_MNEMONIC2(RM, UNPCKLPD, unpcklpd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_unpcklpd_u128);
}
2814
2815
2816/**
2817 * @opdone
2818 * @opmnemonic udf30f14
2819 * @opcode 0x14
2820 * @oppfx 0xf3
2821 * @opunused intel-modrm
2822 * @opcpuid sse
2823 * @optest ->
2824 * @opdone
2825 */
2826
2827/**
2828 * @opmnemonic udf20f14
2829 * @opcode 0x14
2830 * @oppfx 0xf2
2831 * @opunused intel-modrm
2832 * @opcpuid sse
2833 * @optest ->
2834 * @opdone
2835 */
2836
/** Opcode 0x0f 0x15 - unpckhps Vx, Wx */
FNIEMOP_DEF(iemOp_unpckhps_Vx_Wx)
{
    /* UNPCKHPS - interleave high packed singles; delegated to the common
       SSE high-high worker. */
    IEMOP_MNEMONIC2(RM, UNPCKHPS, unpckhps, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, iemAImpl_unpckhps_u128);
}
2843
2844
/** Opcode 0x66 0x0f 0x15 - unpckhpd Vx, Wx */
FNIEMOP_DEF(iemOp_unpckhpd_Vx_Wx)
{
    /* UNPCKHPD - interleave high packed doubles; delegated to the common
       SSE2 high-high worker. */
    IEMOP_MNEMONIC2(RM, UNPCKHPD, unpckhpd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_unpckhpd_u128);
}
2851
2852
2853/* Opcode 0xf3 0x0f 0x15 - invalid */
2854/* Opcode 0xf2 0x0f 0x15 - invalid */
2855
2856/**
2857 * @opdone
2858 * @opmnemonic udf30f15
2859 * @opcode 0x15
2860 * @oppfx 0xf3
2861 * @opunused intel-modrm
2862 * @opcpuid sse
2863 * @optest ->
2864 * @opdone
2865 */
2866
2867/**
2868 * @opmnemonic udf20f15
2869 * @opcode 0x15
2870 * @oppfx 0xf2
2871 * @opunused intel-modrm
2872 * @opcpuid sse
2873 * @optest ->
2874 * @opdone
2875 */
2876
/* 0x0f 0x16: two instructions share the opcode - MOVLHPS for the register
   form and MOVHPS for the memory form.  Both write only the high qword of
   the destination XMM register. */
FNIEMOP_DEF(iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /**
         * @opcode 0x16
         * @opcodesub 11 mr/reg
         * @oppfx none
         * @opcpuid sse
         * @opgroup og_sse_simdfp_datamove
         * @opxcpttype 5
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(RM_REG, MOVLHPS, movlhps, VqHi_WO, Uq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);

        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        /* MOVLHPS: low qword of source -> high qword of destination. */
        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode 0x16
         * @opcodesub !11 mr/reg
         * @oppfx none
         * @opcpuid sse
         * @opgroup og_sse_simdfp_datamove
         * @opxcpttype 5
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-42 -> op1=-42
         * @opfunction iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq
         */
        IEMOP_MNEMONIC2(RM_MEM, MOVHPS, movhps, VqHi_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        /* MOVHPS: m64 -> high qword of destination, low qword preserved. */
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2939
2940
/**
 * @opcode 0x16
 * @opcodesub !11 mr/reg
 * @oppfx 0x66
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movhpd_Vdq_Mq)
{
    /* MOVHPD xmm, m64 - load m64 into the high qword, preserving the low
       qword.  The register form of this encoding is invalid (#UD). */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        IEMOP_MNEMONIC2(RM_MEM, MOVHPD, movhpd, VqHi_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic ud660f16m3
     * @opcode 0x16
     * @opcodesub 11 mr/reg
     * @oppfx 0x66
     * @opunused immediate
     * @opcpuid sse
     * @optest ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}
2986
2987
/**
 * @opcode 0x16
 * @oppfx 0xf3
 * @opcpuid sse3
 * @opgroup og_sse3_pcksclr_datamove
 * @opxcpttype 4
 * @optest op1=-1 op2=0x00000002dddddddd00000001eeeeeeee ->
 * op1=0x00000002000000020000000100000001
 */
FNIEMOP_DEF(iemOp_movshdup_Vdq_Wdq)
{
    /* MOVSHDUP (SSE3) - duplicate the odd-indexed dwords of the source
       (see @optest above); work done by the iemAImpl_movshdup helper. */
    IEMOP_MNEMONIC2(RM, MOVSHDUP, movshdup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);

        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movshdup, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        /* Alignment-checked fetch: misaligned m128 raises #GP (xcpttype 4). */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movshdup, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3046
3047/**
3048 * @opdone
3049 * @opmnemonic udf30f16
3050 * @opcode 0x16
3051 * @oppfx 0xf2
3052 * @opunused intel-modrm
3053 * @opcpuid sse
3054 * @optest ->
3055 * @opdone
3056 */
3057
3058
/**
 * @opcode 0x17
 * @opcodesub !11 mr/reg
 * @oppfx none
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movhps_Mq_Vq)
{
    /* MOVHPS m64, xmm - store the high qword of the XMM register.
       The register form of this encoding is invalid (#UD). */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        IEMOP_MNEMONIC2(MR_MEM, MOVHPS, movhps, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_HI_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic ud0f17m3
     * @opcode 0x17
     * @opcodesub 11 mr/reg
     * @oppfx none
     * @opunused immediate
     * @opcpuid sse
     * @optest ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}
3105
3106
3107/**
3108 * @opcode 0x17
3109 * @opcodesub !11 mr/reg
3110 * @oppfx 0x66
3111 * @opcpuid sse2
3112 * @opgroup og_sse2_pcksclr_datamove
3113 * @opxcpttype 5
3114 * @optest op1=1 op2=2 -> op1=2
3115 * @optest op1=0 op2=-42 -> op1=-42
3116 */
3117FNIEMOP_DEF(iemOp_movhpd_Mq_Vq)
3118{
3119 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3120 if (IEM_IS_MODRM_MEM_MODE(bRm))
3121 {
3122 IEMOP_MNEMONIC2(MR_MEM, MOVHPD, movhpd, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3123
3124 IEM_MC_BEGIN(0, 2);
3125 IEM_MC_LOCAL(uint64_t, uSrc);
3126 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3127
3128 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3129 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3130 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3131 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3132
3133 IEM_MC_FETCH_XREG_HI_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3134 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3135
3136 IEM_MC_ADVANCE_RIP();
3137 IEM_MC_END();
3138 return VINF_SUCCESS;
3139 }
3140
3141 /**
3142 * @opdone
3143 * @opmnemonic ud660f17m3
3144 * @opcode 0x17
3145 * @opcodesub 11 mr/reg
3146 * @oppfx 0x66
3147 * @opunused immediate
3148 * @opcpuid sse
3149 * @optest ->
3150 */
3151 return IEMOP_RAISE_INVALID_OPCODE();
3152}
3153
3154
3155/**
3156 * @opdone
3157 * @opmnemonic udf30f17
3158 * @opcode 0x17
3159 * @oppfx 0xf3
3160 * @opunused intel-modrm
3161 * @opcpuid sse
3162 * @optest ->
3163 * @opdone
3164 */
3165
3166/**
3167 * @opmnemonic udf20f17
3168 * @opcode 0x17
3169 * @oppfx 0xf2
3170 * @opunused intel-modrm
3171 * @opcpuid sse
3172 * @optest ->
3173 * @opdone
3174 */
3175
3176
/** Opcode 0x0f 0x18. */
FNIEMOP_DEF(iemOp_prefetch_Grp16)
{
    /*
     * Group 16: prefetchNTA/T0/T1/T2.  The hint is selected by the reg field
     * of the ModR/M byte; only memory forms are defined (reg forms -> #UD).
     * The prefetch itself is currently a NOP in IEM - only the address is
     * decoded, no memory is touched.
     */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 4: /* Aliased to /0 for the time being according to AMD. */
            case 5: /* Aliased to /0 for the time being according to AMD. */
            case 6: /* Aliased to /0 for the time being according to AMD. */
            case 7: /* Aliased to /0 for the time being according to AMD. */
            case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
            case 1: IEMOP_MNEMONIC(prefetchT0,  "prefetchT0  m8"); break;
            case 2: IEMOP_MNEMONIC(prefetchT1,  "prefetchT1  m8"); break;
            case 3: IEMOP_MNEMONIC(prefetchT2,  "prefetchT2  m8"); break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }

        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Currently a NOP. */
        NOREF(GCPtrEffSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    return IEMOP_RAISE_INVALID_OPCODE();
}
3209
3210
/** Opcode 0x0f 0x19..0x1f. */
FNIEMOP_DEF(iemOp_nop_Ev)
{
    /*
     * Multi-byte NOP (0F 19..1F /r).  The operand is fully decoded (including
     * the effective address for memory forms) but never accessed.
     */
    IEMOP_MNEMONIC(nop_Ev, "nop Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Currently a NOP. */
        NOREF(GCPtrEffSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3236
3237
/** Opcode 0x0f 0x20. */
FNIEMOP_DEF(iemOp_mov_Rd_Cd)
{
    /*
     * MOV r32/r64, CRx.  The operand size is forced to the native width
     * (64-bit in long mode, 32-bit otherwise), and mod is ignored - the r/m
     * field always names a general register.
     */
    /* mod is ignored, as is operand size overrides. */
    IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
    IEMOP_HLP_MIN_386();
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
    else
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = IEM_GET_MODRM_REG(pVCpu, bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
            return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    /* Only CR0, CR2, CR3, CR4 and CR8 exist; others raise #UD. */
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_HLP_DONE_DECODING();

    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, IEM_GET_MODRM_RM(pVCpu, bRm), iCrReg);
}
3269
3270
/** Opcode 0x0f 0x21. */
FNIEMOP_DEF(iemOp_mov_Rd_Dd)
{
    /* MOV r, DRx.  REX.R cannot be used to address DR8..DR15 - #UD. */
    IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
        return IEMOP_RAISE_INVALID_OPCODE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
                                   IEM_GET_MODRM_RM(pVCpu, bRm),
                                   IEM_GET_MODRM_REG_8(bRm));
}
3284
3285
/** Opcode 0x0f 0x22. */
FNIEMOP_DEF(iemOp_mov_Cd_Rd)
{
    /*
     * MOV CRx, r32/r64 - mirror of iemOp_mov_Rd_Cd: forced native operand
     * size, mod ignored, LOCK prefix optionally encodes CR8.
     */
    /* mod is ignored, as is operand size overrides. */
    IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
    IEMOP_HLP_MIN_386();
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
    else
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = IEM_GET_MODRM_REG(pVCpu, bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
            return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    /* Only CR0, CR2, CR3, CR4 and CR8 exist; others raise #UD. */
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_HLP_DONE_DECODING();

    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, IEM_GET_MODRM_RM(pVCpu, bRm));
}
3317
3318
/** Opcode 0x0f 0x23. */
FNIEMOP_DEF(iemOp_mov_Dd_Rd)
{
    /* MOV DRx, r.  REX.R cannot be used to address DR8..DR15 - #UD. */
    IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
        return IEMOP_RAISE_INVALID_OPCODE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
                                   IEM_GET_MODRM_REG_8(bRm),
                                   IEM_GET_MODRM_RM(pVCpu, bRm));
}
3332
3333
/** Opcode 0x0f 0x24. */
FNIEMOP_DEF(iemOp_mov_Rd_Td)
{
    /* MOV r, TRx (test registers) - only valid on pre-Pentium targets;
       Pentium and later raise #UD. */
    IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_PENTIUM))
        return IEMOP_RAISE_INVALID_OPCODE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Td,
                                   IEM_GET_MODRM_RM(pVCpu, bRm),
                                   IEM_GET_MODRM_REG_8(bRm));
}
3347
3348
/** Opcode 0x0f 0x26. */
FNIEMOP_DEF(iemOp_mov_Td_Rd)
{
    /* MOV TRx, r (test registers) - only valid on pre-Pentium targets;
       Pentium and later raise #UD. */
    IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_PENTIUM))
        return IEMOP_RAISE_INVALID_OPCODE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Td_Rd,
                                   IEM_GET_MODRM_REG_8(bRm),
                                   IEM_GET_MODRM_RM(pVCpu, bRm));
}
3362
3363
3364/**
3365 * @opcode 0x28
3366 * @oppfx none
3367 * @opcpuid sse
3368 * @opgroup og_sse_simdfp_datamove
3369 * @opxcpttype 1
3370 * @optest op1=1 op2=2 -> op1=2
3371 * @optest op1=0 op2=-42 -> op1=-42
3372 */
3373FNIEMOP_DEF(iemOp_movaps_Vps_Wps)
3374{
3375 IEMOP_MNEMONIC2(RM, MOVAPS, movaps, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3376 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3377 if (IEM_IS_MODRM_REG_MODE(bRm))
3378 {
3379 /*
3380 * Register, register.
3381 */
3382 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3383 IEM_MC_BEGIN(0, 0);
3384 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3385 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3386 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
3387 IEM_GET_MODRM_RM(pVCpu, bRm));
3388 IEM_MC_ADVANCE_RIP();
3389 IEM_MC_END();
3390 }
3391 else
3392 {
3393 /*
3394 * Register, memory.
3395 */
3396 IEM_MC_BEGIN(0, 2);
3397 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3398 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3399
3400 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3401 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3402 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3403 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3404
3405 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3406 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
3407
3408 IEM_MC_ADVANCE_RIP();
3409 IEM_MC_END();
3410 }
3411 return VINF_SUCCESS;
3412}
3413
3414/**
3415 * @opcode 0x28
3416 * @oppfx 66
3417 * @opcpuid sse2
3418 * @opgroup og_sse2_pcksclr_datamove
3419 * @opxcpttype 1
3420 * @optest op1=1 op2=2 -> op1=2
3421 * @optest op1=0 op2=-42 -> op1=-42
3422 */
3423FNIEMOP_DEF(iemOp_movapd_Vpd_Wpd)
3424{
3425 IEMOP_MNEMONIC2(RM, MOVAPD, movapd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3426 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3427 if (IEM_IS_MODRM_REG_MODE(bRm))
3428 {
3429 /*
3430 * Register, register.
3431 */
3432 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3433 IEM_MC_BEGIN(0, 0);
3434 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3435 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3436 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
3437 IEM_GET_MODRM_RM(pVCpu, bRm));
3438 IEM_MC_ADVANCE_RIP();
3439 IEM_MC_END();
3440 }
3441 else
3442 {
3443 /*
3444 * Register, memory.
3445 */
3446 IEM_MC_BEGIN(0, 2);
3447 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3448 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3449
3450 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3451 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3452 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3453 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3454
3455 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3456 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
3457
3458 IEM_MC_ADVANCE_RIP();
3459 IEM_MC_END();
3460 }
3461 return VINF_SUCCESS;
3462}
3463
3464/* Opcode 0xf3 0x0f 0x28 - invalid */
3465/* Opcode 0xf2 0x0f 0x28 - invalid */
3466
3467/**
3468 * @opcode 0x29
3469 * @oppfx none
3470 * @opcpuid sse
3471 * @opgroup og_sse_simdfp_datamove
3472 * @opxcpttype 1
3473 * @optest op1=1 op2=2 -> op1=2
3474 * @optest op1=0 op2=-42 -> op1=-42
3475 */
3476FNIEMOP_DEF(iemOp_movaps_Wps_Vps)
3477{
3478 IEMOP_MNEMONIC2(MR, MOVAPS, movaps, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3479 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3480 if (IEM_IS_MODRM_REG_MODE(bRm))
3481 {
3482 /*
3483 * Register, register.
3484 */
3485 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3486 IEM_MC_BEGIN(0, 0);
3487 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3488 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3489 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
3490 IEM_GET_MODRM_REG(pVCpu, bRm));
3491 IEM_MC_ADVANCE_RIP();
3492 IEM_MC_END();
3493 }
3494 else
3495 {
3496 /*
3497 * Memory, register.
3498 */
3499 IEM_MC_BEGIN(0, 2);
3500 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3501 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3502
3503 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3504 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3505 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3506 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3507
3508 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3509 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3510
3511 IEM_MC_ADVANCE_RIP();
3512 IEM_MC_END();
3513 }
3514 return VINF_SUCCESS;
3515}
3516
3517/**
3518 * @opcode 0x29
3519 * @oppfx 66
3520 * @opcpuid sse2
3521 * @opgroup og_sse2_pcksclr_datamove
3522 * @opxcpttype 1
3523 * @optest op1=1 op2=2 -> op1=2
3524 * @optest op1=0 op2=-42 -> op1=-42
3525 */
3526FNIEMOP_DEF(iemOp_movapd_Wpd_Vpd)
3527{
3528 IEMOP_MNEMONIC2(MR, MOVAPD, movapd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3529 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3530 if (IEM_IS_MODRM_REG_MODE(bRm))
3531 {
3532 /*
3533 * Register, register.
3534 */
3535 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3536 IEM_MC_BEGIN(0, 0);
3537 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3538 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3539 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
3540 IEM_GET_MODRM_REG(pVCpu, bRm));
3541 IEM_MC_ADVANCE_RIP();
3542 IEM_MC_END();
3543 }
3544 else
3545 {
3546 /*
3547 * Memory, register.
3548 */
3549 IEM_MC_BEGIN(0, 2);
3550 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3551 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3552
3553 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3554 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3555 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3556 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3557
3558 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3559 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3560
3561 IEM_MC_ADVANCE_RIP();
3562 IEM_MC_END();
3563 }
3564 return VINF_SUCCESS;
3565}
3566
3567/* Opcode 0xf3 0x0f 0x29 - invalid */
3568/* Opcode 0xf2 0x0f 0x29 - invalid */
3569
3570
/* Not yet implemented (stubs).  Note: this is the legacy SSE map, so the
   mnemonics are the non-VEX forms (the VEX mirror lives in
   IEMAllInstructionsVexMap1.cpp.h). */
/** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */
FNIEMOP_STUB(iemOp_cvtpi2ps_Vps_Qpi); //NEXT
/** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi */
FNIEMOP_STUB(iemOp_cvtpi2pd_Vpd_Qpi); //NEXT
/** Opcode 0xf3 0x0f 0x2a - cvtsi2ss Vss, Ey */
FNIEMOP_STUB(iemOp_cvtsi2ss_Vss_Ey); //NEXT
/** Opcode 0xf2 0x0f 0x2a - cvtsi2sd Vsd, Ey */
FNIEMOP_STUB(iemOp_cvtsi2sd_Vsd_Ey); //NEXT
3579
3580
3581/**
3582 * @opcode 0x2b
3583 * @opcodesub !11 mr/reg
3584 * @oppfx none
3585 * @opcpuid sse
3586 * @opgroup og_sse1_cachect
3587 * @opxcpttype 1
3588 * @optest op1=1 op2=2 -> op1=2
3589 * @optest op1=0 op2=-42 -> op1=-42
3590 */
3591FNIEMOP_DEF(iemOp_movntps_Mps_Vps)
3592{
3593 IEMOP_MNEMONIC2(MR_MEM, MOVNTPS, movntps, Mps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3594 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3595 if (IEM_IS_MODRM_MEM_MODE(bRm))
3596 {
3597 /*
3598 * memory, register.
3599 */
3600 IEM_MC_BEGIN(0, 2);
3601 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3602 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3603
3604 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3605 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3606 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3607 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3608
3609 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3610 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3611
3612 IEM_MC_ADVANCE_RIP();
3613 IEM_MC_END();
3614 }
3615 /* The register, register encoding is invalid. */
3616 else
3617 return IEMOP_RAISE_INVALID_OPCODE();
3618 return VINF_SUCCESS;
3619}
3620
3621/**
3622 * @opcode 0x2b
3623 * @opcodesub !11 mr/reg
3624 * @oppfx 0x66
3625 * @opcpuid sse2
3626 * @opgroup og_sse2_cachect
3627 * @opxcpttype 1
3628 * @optest op1=1 op2=2 -> op1=2
3629 * @optest op1=0 op2=-42 -> op1=-42
3630 */
3631FNIEMOP_DEF(iemOp_movntpd_Mpd_Vpd)
3632{
3633 IEMOP_MNEMONIC2(MR_MEM, MOVNTPD, movntpd, Mpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3634 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3635 if (IEM_IS_MODRM_MEM_MODE(bRm))
3636 {
3637 /*
3638 * memory, register.
3639 */
3640 IEM_MC_BEGIN(0, 2);
3641 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3642 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3643
3644 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3645 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3646 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3647 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3648
3649 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3650 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3651
3652 IEM_MC_ADVANCE_RIP();
3653 IEM_MC_END();
3654 }
3655 /* The register, register encoding is invalid. */
3656 else
3657 return IEMOP_RAISE_INVALID_OPCODE();
3658 return VINF_SUCCESS;
3659}
3660/* Opcode 0xf3 0x0f 0x2b - invalid */
3661/* Opcode 0xf2 0x0f 0x2b - invalid */
3662
3663
/* 0F 2C..2F conversion and compare instructions - not yet implemented. */
/** Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps */
FNIEMOP_STUB(iemOp_cvttps2pi_Ppi_Wps);
/** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd */
FNIEMOP_STUB(iemOp_cvttpd2pi_Ppi_Wpd);
/** Opcode 0xf3 0x0f 0x2c - cvttss2si Gy, Wss */
FNIEMOP_STUB(iemOp_cvttss2si_Gy_Wss);
/** Opcode 0xf2 0x0f 0x2c - cvttsd2si Gy, Wsd */
FNIEMOP_STUB(iemOp_cvttsd2si_Gy_Wsd);

/** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */
FNIEMOP_STUB(iemOp_cvtps2pi_Ppi_Wps);
/** Opcode 0x66 0x0f 0x2d - cvtpd2pi Qpi, Wpd */
FNIEMOP_STUB(iemOp_cvtpd2pi_Qpi_Wpd);
/** Opcode 0xf3 0x0f 0x2d - cvtss2si Gy, Wss */
FNIEMOP_STUB(iemOp_cvtss2si_Gy_Wss);
/** Opcode 0xf2 0x0f 0x2d - cvtsd2si Gy, Wsd */
FNIEMOP_STUB(iemOp_cvtsd2si_Gy_Wsd);

/** Opcode 0x0f 0x2e - ucomiss Vss, Wss */
FNIEMOP_STUB(iemOp_ucomiss_Vss_Wss);    // NEXT
/** Opcode 0x66 0x0f 0x2e - ucomisd Vsd, Wsd */
FNIEMOP_STUB(iemOp_ucomisd_Vsd_Wsd);    // NEXT
/*  Opcode 0xf3 0x0f 0x2e - invalid */
/*  Opcode 0xf2 0x0f 0x2e - invalid */

/** Opcode 0x0f 0x2f - comiss Vss, Wss */
FNIEMOP_STUB(iemOp_comiss_Vss_Wss);
/** Opcode 0x66 0x0f 0x2f - comisd Vsd, Wsd */
FNIEMOP_STUB(iemOp_comisd_Vsd_Wsd);
/*  Opcode 0xf3 0x0f 0x2f - invalid */
/*  Opcode 0xf2 0x0f 0x2f - invalid */
3695
/** Opcode 0x0f 0x30. */
FNIEMOP_DEF(iemOp_wrmsr)
{
    /* Privileged; all real work (CPL/MSR checks) is in the C implementation. */
    IEMOP_MNEMONIC(wrmsr, "wrmsr");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
}
3703
3704
/** Opcode 0x0f 0x31. */
FNIEMOP_DEF(iemOp_rdtsc)
{
    /* TSC read; CR4.TSD handling is in the C implementation. */
    IEMOP_MNEMONIC(rdtsc, "rdtsc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
}
3712
3713
/** Opcode 0x0f 0x32. */
FNIEMOP_DEF(iemOp_rdmsr)
{
    /* Privileged; all real work (CPL/MSR checks) is in the C implementation. */
    IEMOP_MNEMONIC(rdmsr, "rdmsr");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
}
3721
3722
/** Opcode 0x0f 0x33. */
FNIEMOP_DEF(iemOp_rdpmc)
{
    /* Performance counter read; CR4.PCE/CPL checks live in the C implementation. */
    IEMOP_MNEMONIC(rdpmc, "rdpmc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdpmc);
}
3730
3731
/** Opcode 0x0f 0x34. */
FNIEMOP_DEF(iemOp_sysenter)
{
    /* Fast system call entry; mode/MSR validation is in the C implementation. */
    IEMOP_MNEMONIC0(FIXED, SYSENTER, sysenter, DISOPTYPE_CONTROLFLOW | DISOPTYPE_UNCOND_CONTROLFLOW, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysenter);
}
3739
/** Opcode 0x0f 0x35. */
FNIEMOP_DEF(iemOp_sysexit)
{
    /* Fast system call exit; the effective operand size selects 32/64-bit return. */
    IEMOP_MNEMONIC0(FIXED, SYSEXIT, sysexit, DISOPTYPE_CONTROLFLOW | DISOPTYPE_UNCOND_CONTROLFLOW, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_sysexit, pVCpu->iem.s.enmEffOpSize);
}
3747
/** Opcode 0x0f 0x37 - getsec (SMX), not implemented. */
FNIEMOP_STUB(iemOp_getsec);
3750
3751
/** Opcode 0x0f 0x38. */
FNIEMOP_DEF(iemOp_3byte_Esc_0f_38)
{
#ifdef IEM_WITH_THREE_0F_38
    /* Three-byte escape: dispatch on the third opcode byte; the table has
       four entries per opcode, indexed by the mandatory prefix (idxPrefix). */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnThreeByte0f38[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
#else
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif
}
3763
3764
/** Opcode 0x0f 0x3a. */
FNIEMOP_DEF(iemOp_3byte_Esc_0f_3a)
{
#ifdef IEM_WITH_THREE_0F_3A
    /* Three-byte escape: dispatch on the third opcode byte; the table has
       four entries per opcode, indexed by the mandatory prefix (idxPrefix). */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnThreeByte0f3a[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
#else
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif
}
3776
3777
3778/**
3779 * Implements a conditional move.
3780 *
3781 * Wish there was an obvious way to do this where we could share and reduce
3782 * code bloat.
3783 *
3784 * @param a_Cnd The conditional "microcode" operation.
3785 */
3786#define CMOV_X(a_Cnd) \
3787 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
3788 if (IEM_IS_MODRM_REG_MODE(bRm)) \
3789 { \
3790 switch (pVCpu->iem.s.enmEffOpSize) \
3791 { \
3792 case IEMMODE_16BIT: \
3793 IEM_MC_BEGIN(0, 1); \
3794 IEM_MC_LOCAL(uint16_t, u16Tmp); \
3795 a_Cnd { \
3796 IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
3797 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp); \
3798 } IEM_MC_ENDIF(); \
3799 IEM_MC_ADVANCE_RIP(); \
3800 IEM_MC_END(); \
3801 return VINF_SUCCESS; \
3802 \
3803 case IEMMODE_32BIT: \
3804 IEM_MC_BEGIN(0, 1); \
3805 IEM_MC_LOCAL(uint32_t, u32Tmp); \
3806 a_Cnd { \
3807 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
3808 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp); \
3809 } IEM_MC_ELSE() { \
3810 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
3811 } IEM_MC_ENDIF(); \
3812 IEM_MC_ADVANCE_RIP(); \
3813 IEM_MC_END(); \
3814 return VINF_SUCCESS; \
3815 \
3816 case IEMMODE_64BIT: \
3817 IEM_MC_BEGIN(0, 1); \
3818 IEM_MC_LOCAL(uint64_t, u64Tmp); \
3819 a_Cnd { \
3820 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
3821 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp); \
3822 } IEM_MC_ENDIF(); \
3823 IEM_MC_ADVANCE_RIP(); \
3824 IEM_MC_END(); \
3825 return VINF_SUCCESS; \
3826 \
3827 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
3828 } \
3829 } \
3830 else \
3831 { \
3832 switch (pVCpu->iem.s.enmEffOpSize) \
3833 { \
3834 case IEMMODE_16BIT: \
3835 IEM_MC_BEGIN(0, 2); \
3836 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
3837 IEM_MC_LOCAL(uint16_t, u16Tmp); \
3838 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
3839 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
3840 a_Cnd { \
3841 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp); \
3842 } IEM_MC_ENDIF(); \
3843 IEM_MC_ADVANCE_RIP(); \
3844 IEM_MC_END(); \
3845 return VINF_SUCCESS; \
3846 \
3847 case IEMMODE_32BIT: \
3848 IEM_MC_BEGIN(0, 2); \
3849 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
3850 IEM_MC_LOCAL(uint32_t, u32Tmp); \
3851 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
3852 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
3853 a_Cnd { \
3854 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp); \
3855 } IEM_MC_ELSE() { \
3856 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
3857 } IEM_MC_ENDIF(); \
3858 IEM_MC_ADVANCE_RIP(); \
3859 IEM_MC_END(); \
3860 return VINF_SUCCESS; \
3861 \
3862 case IEMMODE_64BIT: \
3863 IEM_MC_BEGIN(0, 2); \
3864 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
3865 IEM_MC_LOCAL(uint64_t, u64Tmp); \
3866 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
3867 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
3868 a_Cnd { \
3869 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp); \
3870 } IEM_MC_ENDIF(); \
3871 IEM_MC_ADVANCE_RIP(); \
3872 IEM_MC_END(); \
3873 return VINF_SUCCESS; \
3874 \
3875 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
3876 } \
3877 } do {} while (0)
3878
3879
3880
/** Opcode 0x0f 0x40. */
FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF)); /* move if OF=1 */
}


/** Opcode 0x0f 0x41. */
FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF)); /* move if OF=0 */
}


/** Opcode 0x0f 0x42. */
FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF)); /* move if CF=1 (cmovb) */
}


/** Opcode 0x0f 0x43. */
FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF)); /* move if CF=0 (cmovae) */
}


/** Opcode 0x0f 0x44. */
FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
{
    IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF)); /* move if ZF=1 */
}


/** Opcode 0x0f 0x45. */
FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)); /* move if ZF=0 */
}


/** Opcode 0x0f 0x46. */
FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF)); /* move if CF=1 or ZF=1 */
}


/** Opcode 0x0f 0x47. */
FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF)); /* move if CF=0 and ZF=0 */
}
3943
3944
/** Opcode 0x0f 0x48. */
FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF)); /* move if SF=1 */
}


/** Opcode 0x0f 0x49. */
FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF)); /* move if SF=0 */
}


/** Opcode 0x0f 0x4a. */
FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF)); /* move if PF=1 */
}


/** Opcode 0x0f 0x4b. */
FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF)); /* move if PF=0 */
}


/** Opcode 0x0f 0x4c. */
FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF)); /* move if SF!=OF */
}


/** Opcode 0x0f 0x4d. */
FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF)); /* move if SF==OF */
}


/** Opcode 0x0f 0x4e. */
FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF)); /* move if ZF=1 or SF!=OF */
}


/** Opcode 0x0f 0x4f. */
FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF)); /* move if ZF=0 and SF==OF */
}

#undef CMOV_X
4009
/* 0F 50 sign-mask extraction - not yet implemented. */
/** Opcode 0x0f 0x50 - movmskps Gy, Ups */
FNIEMOP_STUB(iemOp_movmskps_Gy_Ups);
/** Opcode 0x66 0x0f 0x50 - movmskpd Gy, Upd */
FNIEMOP_STUB(iemOp_movmskpd_Gy_Upd);
/*  Opcode 0xf3 0x0f 0x50 - invalid */
/*  Opcode 0xf2 0x0f 0x50 - invalid */
4016
4017
/** Opcode 0x0f 0x51 - sqrtps Vps, Wps */
FNIEMOP_DEF(iemOp_sqrtps_Vps_Wps)
{
    /* Packed single-precision square root; 128-bit SSE worker. */
    IEMOP_MNEMONIC2(RM, SQRTPS, sqrtps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_sqrtps_u128);
}


/** Opcode 0x66 0x0f 0x51 - sqrtpd Vpd, Wpd */
FNIEMOP_DEF(iemOp_sqrtpd_Vpd_Wpd)
{
    /* Packed double-precision square root; 128-bit SSE2 worker. */
    IEMOP_MNEMONIC2(RM, SQRTPD, sqrtpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_sqrtpd_u128);
}


/** Opcode 0xf3 0x0f 0x51 - sqrtss Vss, Wss */
FNIEMOP_DEF(iemOp_sqrtss_Vss_Wss)
{
    /* Scalar single-precision square root; only the low 32 bits are sourced. */
    IEMOP_MNEMONIC2(RM, SQRTSS, sqrtss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_sqrtss_u128_r32);
}


/** Opcode 0xf2 0x0f 0x51 - sqrtsd Vsd, Wsd */
FNIEMOP_DEF(iemOp_sqrtsd_Vsd_Wsd)
{
    /* Scalar double-precision square root; only the low 64 bits are sourced. */
    IEMOP_MNEMONIC2(RM, SQRTSD, sqrtsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_sqrtsd_u128_r64);
}
4048
4049
/* 0F 52/53 reciprocal (square root) approximations - not yet implemented. */
/** Opcode 0x0f 0x52 - rsqrtps Vps, Wps */
FNIEMOP_STUB(iemOp_rsqrtps_Vps_Wps);
/*  Opcode 0x66 0x0f 0x52 - invalid */
/** Opcode 0xf3 0x0f 0x52 - rsqrtss Vss, Wss */
FNIEMOP_STUB(iemOp_rsqrtss_Vss_Wss);
/*  Opcode 0xf2 0x0f 0x52 - invalid */

/** Opcode 0x0f 0x53 - rcpps Vps, Wps */
FNIEMOP_STUB(iemOp_rcpps_Vps_Wps);
/*  Opcode 0x66 0x0f 0x53 - invalid */
/** Opcode 0xf3 0x0f 0x53 - rcpss Vss, Wss */
FNIEMOP_STUB(iemOp_rcpss_Vss_Wss);
/*  Opcode 0xf2 0x0f 0x53 - invalid */
4063
4064
/** Opcode 0x0f 0x54 - andps Vps, Wps */
FNIEMOP_DEF(iemOp_andps_Vps_Wps)
{
    /* Bitwise AND shares the integer pand implementation - the bit pattern
       result is identical.
       NOTE(review): this routes through the Sse2 common worker although
       andps is an SSE (not SSE2) instruction; confirm the CPUID/raise
       behaviour of iemOpCommonSse2_FullFull_To_Full is intended here. */
    IEMOP_MNEMONIC2(RM, ANDPS, andps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pand_u128);
}


/** Opcode 0x66 0x0f 0x54 - andpd Vpd, Wpd */
FNIEMOP_DEF(iemOp_andpd_Vpd_Wpd)
{
    /* Bitwise AND, SSE2 form; same pand worker as andps. */
    IEMOP_MNEMONIC2(RM, ANDPD, andpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pand_u128);
}
4079
4080
4081/* Opcode 0xf3 0x0f 0x54 - invalid */
4082/* Opcode 0xf2 0x0f 0x54 - invalid */
4083
4084
/** Opcode 0x0f 0x55 - andnps Vps, Wps */
FNIEMOP_DEF(iemOp_andnps_Vps_Wps)
{
    /* Bitwise AND-NOT shares the integer pandn implementation.
       NOTE(review): SSE instruction routed via the Sse2 common worker,
       same as andps - confirm intended. */
    IEMOP_MNEMONIC2(RM, ANDNPS, andnps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pandn_u128);
}


/** Opcode 0x66 0x0f 0x55 - andnpd Vpd, Wpd */
FNIEMOP_DEF(iemOp_andnpd_Vpd_Wpd)
{
    /* Bitwise AND-NOT, SSE2 form; same pandn worker as andnps. */
    IEMOP_MNEMONIC2(RM, ANDNPD, andnpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pandn_u128);
}
4099
4100
4101/* Opcode 0xf3 0x0f 0x55 - invalid */
4102/* Opcode 0xf2 0x0f 0x55 - invalid */
4103
4104
/** Opcode 0x0f 0x56 - orps Vps, Wps */
FNIEMOP_DEF(iemOp_orps_Vps_Wps)
{
    /* Bitwise OR shares the integer por implementation.
       NOTE(review): SSE instruction routed via the Sse2 common worker -
       confirm intended. */
    IEMOP_MNEMONIC2(RM, ORPS, orps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_por_u128);
}


/** Opcode 0x66 0x0f 0x56 - orpd Vpd, Wpd */
FNIEMOP_DEF(iemOp_orpd_Vpd_Wpd)
{
    /* Bitwise OR, SSE2 form; same por worker as orps. */
    IEMOP_MNEMONIC2(RM, ORPD, orpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_por_u128);
}
4119
4120
4121/* Opcode 0xf3 0x0f 0x56 - invalid */
4122/* Opcode 0xf2 0x0f 0x56 - invalid */
4123
4124
/** Opcode 0x0f 0x57 - xorps Vps, Wps */
FNIEMOP_DEF(iemOp_xorps_Vps_Wps)
{
    /* Bitwise XOR shares the integer pxor implementation.
       NOTE(review): SSE instruction routed via the Sse2 common worker -
       confirm intended. */
    IEMOP_MNEMONIC2(RM, XORPS, xorps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pxor_u128);
}


/** Opcode 0x66 0x0f 0x57 - xorpd Vpd, Wpd */
FNIEMOP_DEF(iemOp_xorpd_Vpd_Wpd)
{
    /* Bitwise XOR, SSE2 form; same pxor worker as xorps. */
    IEMOP_MNEMONIC2(RM, XORPD, xorpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pxor_u128);
}
4139
4140
4141/* Opcode 0xf3 0x0f 0x57 - invalid */
4142/* Opcode 0xf2 0x0f 0x57 - invalid */
4143
/** Opcode 0x0f 0x58 - addps Vps, Wps */
FNIEMOP_DEF(iemOp_addps_Vps_Wps)
{
    /* Packed single-precision add; MXCSR-aware FP worker. */
    IEMOP_MNEMONIC2(RM, ADDPS, addps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_addps_u128);
}


/** Opcode 0x66 0x0f 0x58 - addpd Vpd, Wpd */
FNIEMOP_DEF(iemOp_addpd_Vpd_Wpd)
{
    /* Packed double-precision add; MXCSR-aware SSE2 FP worker. */
    IEMOP_MNEMONIC2(RM, ADDPD, addpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_addpd_u128);
}


/** Opcode 0xf3 0x0f 0x58 - addss Vss, Wss */
FNIEMOP_DEF(iemOp_addss_Vss_Wss)
{
    /* Scalar single-precision add; only the low 32 bits are sourced. */
    IEMOP_MNEMONIC2(RM, ADDSS, addss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_addss_u128_r32);
}


/** Opcode 0xf2 0x0f 0x58 - addsd Vsd, Wsd */
FNIEMOP_DEF(iemOp_addsd_Vsd_Wsd)
{
    /* Scalar double-precision add; only the low 64 bits are sourced. */
    IEMOP_MNEMONIC2(RM, ADDSD, addsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_addsd_u128_r64);
}
4174
4175
/** Opcode 0x0f 0x59 - mulps Vps, Wps */
FNIEMOP_DEF(iemOp_mulps_Vps_Wps)
{
    /* Packed single-precision multiply; MXCSR-aware FP worker. */
    IEMOP_MNEMONIC2(RM, MULPS, mulps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_mulps_u128);
}


/** Opcode 0x66 0x0f 0x59 - mulpd Vpd, Wpd */
FNIEMOP_DEF(iemOp_mulpd_Vpd_Wpd)
{
    /* Packed double-precision multiply; MXCSR-aware SSE2 FP worker. */
    IEMOP_MNEMONIC2(RM, MULPD, mulpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_mulpd_u128);
}


/** Opcode 0xf3 0x0f 0x59 - mulss Vss, Wss */
FNIEMOP_DEF(iemOp_mulss_Vss_Wss)
{
    /* Scalar single-precision multiply; only the low 32 bits are sourced. */
    IEMOP_MNEMONIC2(RM, MULSS, mulss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_mulss_u128_r32);
}


/** Opcode 0xf2 0x0f 0x59 - mulsd Vsd, Wsd */
FNIEMOP_DEF(iemOp_mulsd_Vsd_Wsd)
{
    /* Scalar double-precision multiply; only the low 64 bits are sourced. */
    IEMOP_MNEMONIC2(RM, MULSD, mulsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_mulsd_u128_r64);
}
4206
4207
/* 0F 5A packed float width conversions - not yet implemented. */
/** Opcode 0x0f 0x5a - cvtps2pd Vpd, Wps */
FNIEMOP_STUB(iemOp_cvtps2pd_Vpd_Wps);
/** Opcode 0x66 0x0f 0x5a - cvtpd2ps Vps, Wpd */
FNIEMOP_STUB(iemOp_cvtpd2ps_Vps_Wpd);
4212
4213
/** Opcode 0xf3 0x0f 0x5a - cvtss2sd Vsd, Wss */
FNIEMOP_DEF(iemOp_cvtss2sd_Vsd_Wss)
{
    /* Scalar single -> double conversion; low 32 bits sourced. */
    IEMOP_MNEMONIC2(RM, CVTSS2SD, cvtss2sd, Vsd, Wss, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_cvtss2sd_u128_r32);
}


/** Opcode 0xf2 0x0f 0x5a - cvtsd2ss Vss, Wsd */
FNIEMOP_DEF(iemOp_cvtsd2ss_Vss_Wsd)
{
    /* Scalar double -> single conversion; low 64 bits sourced. */
    IEMOP_MNEMONIC2(RM, CVTSD2SS, cvtsd2ss, Vss, Wsd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_cvtsd2ss_u128_r64);
}
4228
4229
/* Not yet implemented - FNIEMOP_STUB placeholders. */
/** Opcode 0x0f 0x5b - cvtdq2ps Vps, Wdq */
FNIEMOP_STUB(iemOp_cvtdq2ps_Vps_Wdq);
/** Opcode 0x66 0x0f 0x5b - cvtps2dq Vdq, Wps */
FNIEMOP_STUB(iemOp_cvtps2dq_Vdq_Wps);
/** Opcode 0xf3 0x0f 0x5b - cvttps2dq Vdq, Wps */
FNIEMOP_STUB(iemOp_cvttps2dq_Vdq_Wps);
/* Opcode 0xf2 0x0f 0x5b - invalid */


/** Opcode 0x0f 0x5c - subps Vps, Wps */
FNIEMOP_DEF(iemOp_subps_Vps_Wps)
{
    /* SUB family: same decode pattern as ADD/MUL above, only the iemAImpl worker differs. */
    IEMOP_MNEMONIC2(RM, SUBPS, subps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_subps_u128);
}


/** Opcode 0x66 0x0f 0x5c - subpd Vpd, Wpd */
FNIEMOP_DEF(iemOp_subpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, SUBPD, subpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_subpd_u128);
}


/** Opcode 0xf3 0x0f 0x5c - subss Vss, Wss */
FNIEMOP_DEF(iemOp_subss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, SUBSS, subss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_subss_u128_r32);
}


/** Opcode 0xf2 0x0f 0x5c - subsd Vsd, Wsd */
FNIEMOP_DEF(iemOp_subsd_Vsd_Wsd)
{
    IEMOP_MNEMONIC2(RM, SUBSD, subsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_subsd_u128_r64);
}
4269
4270
/** Opcode 0x0f 0x5d - minps Vps, Wps */
FNIEMOP_DEF(iemOp_minps_Vps_Wps)
{
    /* MIN/DIV families: identical dispatch shape, per-instruction iemAImpl worker. */
    IEMOP_MNEMONIC2(RM, MINPS, minps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_minps_u128);
}


/** Opcode 0x66 0x0f 0x5d - minpd Vpd, Wpd */
FNIEMOP_DEF(iemOp_minpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, MINPD, minpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_minpd_u128);
}


/** Opcode 0xf3 0x0f 0x5d - minss Vss, Wss */
FNIEMOP_DEF(iemOp_minss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, MINSS, minss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_minss_u128_r32);
}


/** Opcode 0xf2 0x0f 0x5d - minsd Vsd, Wsd */
FNIEMOP_DEF(iemOp_minsd_Vsd_Wsd)
{
    IEMOP_MNEMONIC2(RM, MINSD, minsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_minsd_u128_r64);
}


/** Opcode 0x0f 0x5e - divps Vps, Wps */
FNIEMOP_DEF(iemOp_divps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, DIVPS, divps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_divps_u128);
}


/** Opcode 0x66 0x0f 0x5e - divpd Vpd, Wpd */
FNIEMOP_DEF(iemOp_divpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, DIVPD, divpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_divpd_u128);
}


/** Opcode 0xf3 0x0f 0x5e - divss Vss, Wss */
FNIEMOP_DEF(iemOp_divss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, DIVSS, divss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_divss_u128_r32);
}


/** Opcode 0xf2 0x0f 0x5e - divsd Vsd, Wsd */
FNIEMOP_DEF(iemOp_divsd_Vsd_Wsd)
{
    IEMOP_MNEMONIC2(RM, DIVSD, divsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_divsd_u128_r64);
}
4333
4334
/** Opcode 0x0f 0x5f - maxps Vps, Wps */
FNIEMOP_DEF(iemOp_maxps_Vps_Wps)
{
    /* MAX family: same decode pattern as MIN above. */
    IEMOP_MNEMONIC2(RM, MAXPS, maxps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_maxps_u128);
}


/** Opcode 0x66 0x0f 0x5f - maxpd Vpd, Wpd */
FNIEMOP_DEF(iemOp_maxpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, MAXPD, maxpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_maxpd_u128);
}


/** Opcode 0xf3 0x0f 0x5f - maxss Vss, Wss */
FNIEMOP_DEF(iemOp_maxss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, MAXSS, maxss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_maxss_u128_r32);
}


/** Opcode 0xf2 0x0f 0x5f - maxsd Vsd, Wsd */
FNIEMOP_DEF(iemOp_maxsd_Vsd_Wsd)
{
    IEMOP_MNEMONIC2(RM, MAXSD, maxsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_maxsd_u128_r64);
}
4365
4366
/** Opcode 0x0f 0x60 - punpcklbw Pq, Qd */
FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
{
    /* Low-half interleave: MMX forms use the Mmx_LowLow worker, 0x66-prefixed
       SSE2 forms the Sse2_LowLow worker; only the element width differs per opcode. */
    IEMOP_MNEMONIC2(RM, PUNPCKLBW, punpcklbw, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpcklbw_u64);
}


/** Opcode 0x66 0x0f 0x60 - punpcklbw Vx, Wx */
FNIEMOP_DEF(iemOp_punpcklbw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PUNPCKLBW, punpcklbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklbw_u128);
}


/* Opcode 0xf3 0x0f 0x60 - invalid */


/** Opcode 0x0f 0x61 - punpcklwd Pq, Qd */
FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd)
{
    /** @todo AMD mark the MMX version as 3DNow!. Intel says MMX CPUID req. */
    IEMOP_MNEMONIC2(RM, PUNPCKLWD, punpcklwd, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpcklwd_u64);
}


/** Opcode 0x66 0x0f 0x61 - punpcklwd Vx, Wx */
FNIEMOP_DEF(iemOp_punpcklwd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PUNPCKLWD, punpcklwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklwd_u128);
}


/* Opcode 0xf3 0x0f 0x61 - invalid */


/** Opcode 0x0f 0x62 - punpckldq Pq, Qd */
FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd)
{
    IEMOP_MNEMONIC2(RM, PUNPCKLDQ, punpckldq, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpckldq_u64);
}


/** Opcode 0x66 0x0f 0x62 - punpckldq Vx, Wx */
FNIEMOP_DEF(iemOp_punpckldq_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PUNPCKLDQ, punpckldq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpckldq_u128);
}


/* Opcode 0xf3 0x0f 0x62 - invalid */
4423
4424
4425
/** Opcode 0x0f 0x63 - packsswb Pq, Qq */
FNIEMOP_DEF(iemOp_packsswb_Pq_Qq)
{
    /* NOTE(review): function name and header say Qq but the mnemonic macro passes Qd
       for the second operand - confirm which is intended and make them agree. */
    IEMOP_MNEMONIC2(RM, PACKSSWB, packsswb, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packsswb_u64);
}


/** Opcode 0x66 0x0f 0x63 - packsswb Vx, Wx */
FNIEMOP_DEF(iemOp_packsswb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PACKSSWB, packsswb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packsswb_u128);
}


/* Opcode 0xf3 0x0f 0x63 - invalid */


/** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq */
FNIEMOP_DEF(iemOp_pcmpgtb_Pq_Qq)
{
    /* PCMPGTx family: plain FullFull workers (not the Opt variants used by the PACK ops). */
    IEMOP_MNEMONIC2(RM, PCMPGTB, pcmpgtb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpgtb_u64);
}


/** Opcode 0x66 0x0f 0x64 - pcmpgtb Vx, Wx */
FNIEMOP_DEF(iemOp_pcmpgtb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PCMPGTB, pcmpgtb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpgtb_u128);
}


/* Opcode 0xf3 0x0f 0x64 - invalid */


/** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq */
FNIEMOP_DEF(iemOp_pcmpgtw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PCMPGTW, pcmpgtw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpgtw_u64);
}


/** Opcode 0x66 0x0f 0x65 - pcmpgtw Vx, Wx */
FNIEMOP_DEF(iemOp_pcmpgtw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PCMPGTW, pcmpgtw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpgtw_u128);
}


/* Opcode 0xf3 0x0f 0x65 - invalid */


/** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq */
FNIEMOP_DEF(iemOp_pcmpgtd_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PCMPGTD, pcmpgtd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpgtd_u64);
}


/** Opcode 0x66 0x0f 0x66 - pcmpgtd Vx, Wx */
FNIEMOP_DEF(iemOp_pcmpgtd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PCMPGTD, pcmpgtd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpgtd_u128);
}


/* Opcode 0xf3 0x0f 0x66 - invalid */


/** Opcode 0x0f 0x67 - packuswb Pq, Qq */
FNIEMOP_DEF(iemOp_packuswb_Pq_Qq)
{
    /* NOTE(review): same Qq-vs-Qd mnemonic-operand mismatch as packsswb above. */
    IEMOP_MNEMONIC2(RM, PACKUSWB, packuswb, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packuswb_u64);
}


/** Opcode 0x66 0x0f 0x67 - packuswb Vx, Wx */
FNIEMOP_DEF(iemOp_packuswb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PACKUSWB, packuswb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packuswb_u128);
}


/* Opcode 0xf3 0x0f 0x67 - invalid */
4519
4520
/** Opcode 0x0f 0x68 - punpckhbw Pq, Qq
 * @note Intel and AMD both uses Qd for the second parameter, however they
 *       both list it as a mmX/mem64 operand and intel describes it as being
 *       loaded as a qword, so it should be Qq, shouldn't it? */
FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qq)
{
    /* High-half interleave family: routed through the HighHigh_To_Full workers. */
    IEMOP_MNEMONIC2(RM, PUNPCKHBW, punpckhbw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhbw_u64);
}


/** Opcode 0x66 0x0f 0x68 - punpckhbw Vx, Wx */
FNIEMOP_DEF(iemOp_punpckhbw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PUNPCKHBW, punpckhbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhbw_u128);
}


/* Opcode 0xf3 0x0f 0x68 - invalid */


/** Opcode 0x0f 0x69 - punpckhwd Pq, Qq
 * @note Intel and AMD both uses Qd for the second parameter, however they
 *       both list it as a mmX/mem64 operand and intel describes it as being
 *       loaded as a qword, so it should be Qq, shouldn't it? */
FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PUNPCKHWD, punpckhwd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhwd_u64);
}


/** Opcode 0x66 0x0f 0x69 - punpckhwd Vx, Wx */
FNIEMOP_DEF(iemOp_punpckhwd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PUNPCKHWD, punpckhwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhwd_u128);

}


/* Opcode 0xf3 0x0f 0x69 - invalid */


/** Opcode 0x0f 0x6a - punpckhdq Pq, Qq
 * @note Intel and AMD both uses Qd for the second parameter, however they
 *       both list it as a mmX/mem64 operand and intel describes it as being
 *       loaded as a qword, so it should be Qq, shouldn't it? */
FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PUNPCKHDQ, punpckhdq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhdq_u64);
}


/** Opcode 0x66 0x0f 0x6a - punpckhdq Vx, Wx */
FNIEMOP_DEF(iemOp_punpckhdq_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PUNPCKHDQ, punpckhdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhdq_u128);
}


/* Opcode 0xf3 0x0f 0x6a - invalid */
4586
4587
/** Opcode 0x0f 0x6b - packssdw Pq, Qd */
FNIEMOP_DEF(iemOp_packssdw_Pq_Qd)
{
    IEMOP_MNEMONIC2(RM, PACKSSDW, packssdw, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packssdw_u64);
}


/** Opcode 0x66 0x0f 0x6b - packssdw Vx, Wx */
FNIEMOP_DEF(iemOp_packssdw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PACKSSDW, packssdw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packssdw_u128);
}


/* Opcode 0xf3 0x0f 0x6b - invalid */


/* Opcode 0x0f 0x6c - invalid */


/** Opcode 0x66 0x0f 0x6c - punpcklqdq Vx, Wx */
FNIEMOP_DEF(iemOp_punpcklqdq_Vx_Wx)
{
    /* SSE2-only: qword interleave has no MMX counterpart (0x0f 0x6c is invalid). */
    IEMOP_MNEMONIC2(RM, PUNPCKLQDQ, punpcklqdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklqdq_u128);
}


/* Opcode 0xf3 0x0f 0x6c - invalid */
/* Opcode 0xf2 0x0f 0x6c - invalid */


/* Opcode 0x0f 0x6d - invalid */


/** Opcode 0x66 0x0f 0x6d - punpckhqdq Vx, Wx */
FNIEMOP_DEF(iemOp_punpckhqdq_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PUNPCKHQDQ, punpckhqdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhqdq_u128);
}


/* Opcode 0xf3 0x0f 0x6d - invalid */
4634
4635
/** Opcode 0x0f 0x6e - movd/movq Pd, Ey: load a GPR or memory operand into an
 *  MMX register. REX.W selects the 64-bit MOVQ form; otherwise the 32-bit
 *  source is zero-extended to 64 bits (MOVD). */
FNIEMOP_DEF(iemOp_movd_q_Pd_Ey)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        /**
         * @opcode      0x6e
         * @opcodesub   rex.w=1
         * @oppfx       none
         * @opcpuid     mmx
         * @opgroup     og_mmx_datamove
         * @opxcpttype  5
         * @optest      64-bit / op1=1 op2=2   -> op1=2   ftw=0xff
         * @optest      64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
         */
        IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* MMX, greg64 */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

            IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
            /* Writing an MMX register transitions the FPU to MMX mode (tag word etc.). */
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            /* MMX, [mem64] */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

            IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /**
         * @opdone
         * @opcode      0x6e
         * @opcodesub   rex.w=0
         * @oppfx       none
         * @opcpuid     mmx
         * @opgroup     og_mmx_datamove
         * @opxcpttype  5
         * @opfunction  iemOp_movd_q_Pd_Ey
         * @optest      op1=1 op2=2   -> op1=2   ftw=0xff
         * @optest      op1=0 op2=-42 -> op1=-42 ftw=0xff
         */
        IEMOP_MNEMONIC2(RM, MOVD, movd, PdZx_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* MMX, greg */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

            /* 32-bit GPR source is zero-extended into the 64-bit MMX register. */
            IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            /* MMX, [mem] */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

            IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_MREG_U32_ZX_U64(IEM_GET_MODRM_REG_8(bRm), u32Tmp);
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}
4743
/** Opcode 0x66 0x0f 0x6e - movd/movq Vy, Ey: load a GPR or memory operand into
 *  an XMM register, zero-extending to 128 bits. REX.W selects the 64-bit MOVQ
 *  form; otherwise the 32-bit MOVD form is used. */
FNIEMOP_DEF(iemOp_movd_q_Vy_Ey)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        /**
         * @opcode      0x6e
         * @opcodesub   rex.w=1
         * @oppfx       0x66
         * @opcpuid     sse2
         * @opgroup     og_sse2_simdint_datamove
         * @opxcpttype  5
         * @optest      64-bit / op1=1 op2=2   -> op1=2
         * @optest      64-bit / op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* XMM, greg64 */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

            /* Low qword written, upper qword zeroed (ZX_U128). */
            IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            /* XMM, [mem64] */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

            IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /**
         * @opdone
         * @opcode      0x6e
         * @opcodesub   rex.w=0
         * @oppfx       0x66
         * @opcpuid     sse2
         * @opgroup     og_sse2_simdint_datamove
         * @opxcpttype  5
         * @opfunction  iemOp_movd_q_Vy_Ey
         * @optest      op1=1 op2=2   -> op1=2
         * @optest      op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(RM, MOVD, movd, VdZx_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* XMM, greg32 */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

            /* Low dword written, bits 127:32 zeroed (ZX_U128). */
            IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            /* XMM, [mem32] */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

            IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}
4847
4848/* Opcode 0xf3 0x0f 0x6e - invalid */
4849
4850
4851/**
4852 * @opcode 0x6f
4853 * @oppfx none
4854 * @opcpuid mmx
4855 * @opgroup og_mmx_datamove
4856 * @opxcpttype 5
4857 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
4858 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
4859 */
4860FNIEMOP_DEF(iemOp_movq_Pq_Qq)
4861{
4862 IEMOP_MNEMONIC2(RM, MOVD, movd, Pq_WO, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
4863 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4864 if (IEM_IS_MODRM_REG_MODE(bRm))
4865 {
4866 /*
4867 * Register, register.
4868 */
4869 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4870 IEM_MC_BEGIN(0, 1);
4871 IEM_MC_LOCAL(uint64_t, u64Tmp);
4872
4873 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4874 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4875
4876 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_RM_8(bRm));
4877 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
4878 IEM_MC_FPU_TO_MMX_MODE();
4879
4880 IEM_MC_ADVANCE_RIP();
4881 IEM_MC_END();
4882 }
4883 else
4884 {
4885 /*
4886 * Register, memory.
4887 */
4888 IEM_MC_BEGIN(0, 2);
4889 IEM_MC_LOCAL(uint64_t, u64Tmp);
4890 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4891
4892 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4893 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4894 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4895 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4896
4897 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4898 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
4899 IEM_MC_FPU_TO_MMX_MODE();
4900
4901 IEM_MC_ADVANCE_RIP();
4902 IEM_MC_END();
4903 }
4904 return VINF_SUCCESS;
4905}
4906
4907/**
4908 * @opcode 0x6f
4909 * @oppfx 0x66
4910 * @opcpuid sse2
4911 * @opgroup og_sse2_simdint_datamove
4912 * @opxcpttype 1
4913 * @optest op1=1 op2=2 -> op1=2
4914 * @optest op1=0 op2=-42 -> op1=-42
4915 */
4916FNIEMOP_DEF(iemOp_movdqa_Vdq_Wdq)
4917{
4918 IEMOP_MNEMONIC2(RM, MOVDQA, movdqa, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4919 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4920 if (IEM_IS_MODRM_REG_MODE(bRm))
4921 {
4922 /*
4923 * Register, register.
4924 */
4925 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4926 IEM_MC_BEGIN(0, 0);
4927
4928 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4929 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4930
4931 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
4932 IEM_GET_MODRM_RM(pVCpu, bRm));
4933 IEM_MC_ADVANCE_RIP();
4934 IEM_MC_END();
4935 }
4936 else
4937 {
4938 /*
4939 * Register, memory.
4940 */
4941 IEM_MC_BEGIN(0, 2);
4942 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4943 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4944
4945 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4946 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4947 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4948 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4949
4950 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4951 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
4952
4953 IEM_MC_ADVANCE_RIP();
4954 IEM_MC_END();
4955 }
4956 return VINF_SUCCESS;
4957}
4958
4959/**
4960 * @opcode 0x6f
4961 * @oppfx 0xf3
4962 * @opcpuid sse2
4963 * @opgroup og_sse2_simdint_datamove
4964 * @opxcpttype 4UA
4965 * @optest op1=1 op2=2 -> op1=2
4966 * @optest op1=0 op2=-42 -> op1=-42
4967 */
4968FNIEMOP_DEF(iemOp_movdqu_Vdq_Wdq)
4969{
4970 IEMOP_MNEMONIC2(RM, MOVDQU, movdqu, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4971 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4972 if (IEM_IS_MODRM_REG_MODE(bRm))
4973 {
4974 /*
4975 * Register, register.
4976 */
4977 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4978 IEM_MC_BEGIN(0, 0);
4979 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4980 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4981 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
4982 IEM_GET_MODRM_RM(pVCpu, bRm));
4983 IEM_MC_ADVANCE_RIP();
4984 IEM_MC_END();
4985 }
4986 else
4987 {
4988 /*
4989 * Register, memory.
4990 */
4991 IEM_MC_BEGIN(0, 2);
4992 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4993 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4994
4995 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4996 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4997 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4998 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4999 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
5000 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
5001
5002 IEM_MC_ADVANCE_RIP();
5003 IEM_MC_END();
5004 }
5005 return VINF_SUCCESS;
5006}
5007
5008
/** Opcode 0x0f 0x70 - pshufw Pq, Qq, Ib
 * Shuffles the words of the source MMX operand into the destination according
 * to the imm8 selector. Requires SSE or the AMD MMX extensions (see the
 * CHECK_SSE_OR_MMXEXT exception macro below). */
FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib)
{
    IEMOP_MNEMONIC3(RMI, PSHUFW, pshufw, Pq, Qq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        /* The imm8 follows the ModR/M byte directly in register mode. */
        uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint64_t *,          pDst, 0);
        IEM_MC_ARG(uint64_t const *,    pSrc, 1);
        IEM_MC_ARG_CONST(uint8_t,       bEvilArg, /*=*/ bEvil, 2);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pshufw_u64, pDst, pSrc, bEvilArg);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint64_t *,                  pDst,       0);
        IEM_MC_LOCAL(uint64_t,                  uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *,  pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);

        /* In memory mode the imm8 comes after any SIB/displacement bytes,
           hence it is fetched after the effective-address calculation. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
        IEM_MC_ARG_CONST(uint8_t,               bEvilArg, /*=*/ bEvil, 2);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pshufw_u64, pDst, pSrc, bEvilArg);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5065
5066
5067/**
5068 * Common worker for SSE2 instructions on the forms:
5069 * pshufd xmm1, xmm2/mem128, imm8
5070 * pshufhw xmm1, xmm2/mem128, imm8
5071 * pshuflw xmm1, xmm2/mem128, imm8
5072 *
5073 * Proper alignment of the 128-bit operand is enforced.
5074 * Exceptions type 4. SSE2 cpuid checks.
5075 */
5076FNIEMOP_DEF_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, PFNIEMAIMPLMEDIAPSHUFU128, pfnWorker)
5077{
5078 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5079 if (IEM_IS_MODRM_REG_MODE(bRm))
5080 {
5081 /*
5082 * Register, register.
5083 */
5084 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
5085 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5086
5087 IEM_MC_BEGIN(3, 0);
5088 IEM_MC_ARG(PRTUINT128U, puDst, 0);
5089 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
5090 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
5091 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
5092 IEM_MC_PREPARE_SSE_USAGE();
5093 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
5094 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
5095 IEM_MC_CALL_VOID_AIMPL_3(pfnWorker, puDst, puSrc, bEvilArg);
5096 IEM_MC_ADVANCE_RIP();
5097 IEM_MC_END();
5098 }
5099 else
5100 {
5101 /*
5102 * Register, memory.
5103 */
5104 IEM_MC_BEGIN(3, 2);
5105 IEM_MC_ARG(PRTUINT128U, puDst, 0);
5106 IEM_MC_LOCAL(RTUINT128U, uSrc);
5107 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
5108 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5109
5110 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5111 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
5112 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
5113 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5114 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
5115
5116 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
5117 IEM_MC_PREPARE_SSE_USAGE();
5118 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
5119 IEM_MC_CALL_VOID_AIMPL_3(pfnWorker, puDst, puSrc, bEvilArg);
5120
5121 IEM_MC_ADVANCE_RIP();
5122 IEM_MC_END();
5123 }
5124 return VINF_SUCCESS;
5125}
5126
5127
/** Opcode 0x66 0x0f 0x70 - pshufd Vx, Wx, Ib */
FNIEMOP_DEF(iemOp_pshufd_Vx_Wx_Ib)
{
    /* All three 0x70 SSE2 shuffles share iemOpCommonSse2_pshufXX_Vx_Wx_Ib above. */
    IEMOP_MNEMONIC3(RMI, PSHUFD, pshufd, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshufd_u128);
}


/** Opcode 0xf3 0x0f 0x70 - pshufhw Vx, Wx, Ib */
FNIEMOP_DEF(iemOp_pshufhw_Vx_Wx_Ib)
{
    IEMOP_MNEMONIC3(RMI, PSHUFHW, pshufhw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshufhw_u128);
}


/** Opcode 0xf2 0x0f 0x70 - pshuflw Vx, Wx, Ib */
FNIEMOP_DEF(iemOp_pshuflw_Vx_Wx_Ib)
{
    IEMOP_MNEMONIC3(RMI, PSHUFLW, pshuflw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshuflw_u128);
}
5150
5151
5152/**
5153 * Common worker for MMX instructions of the form:
5154 * psrlw mm, imm8
5155 * psraw mm, imm8
5156 * psllw mm, imm8
5157 * psrld mm, imm8
5158 * psrad mm, imm8
5159 * pslld mm, imm8
5160 * psrlq mm, imm8
5161 * psllq mm, imm8
5162 *
5163 */
5164FNIEMOP_DEF_2(iemOpCommonMmx_Shift_Imm, uint8_t, bRm, FNIEMAIMPLMEDIAPSHIFTU64, pfnU64)
5165{
5166 if (IEM_IS_MODRM_REG_MODE(bRm))
5167 {
5168 /*
5169 * Register, immediate.
5170 */
5171 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
5172 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5173
5174 IEM_MC_BEGIN(2, 0);
5175 IEM_MC_ARG(uint64_t *, pDst, 0);
5176 IEM_MC_ARG_CONST(uint8_t, bShiftArg, /*=*/ bImm, 1);
5177 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
5178 IEM_MC_PREPARE_FPU_USAGE();
5179 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
5180 IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, bShiftArg);
5181 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
5182 IEM_MC_FPU_TO_MMX_MODE();
5183 IEM_MC_ADVANCE_RIP();
5184 IEM_MC_END();
5185 }
5186 else
5187 {
5188 /*
5189 * Register, memory not supported.
5190 */
5191 /// @todo Caller already enforced register mode?!
5192 }
5193 return VINF_SUCCESS;
5194}
5195
5196
5197/**
5198 * Common worker for SSE2 instructions of the form:
5199 * psrlw xmm, imm8
5200 * psraw xmm, imm8
5201 * psllw xmm, imm8
5202 * psrld xmm, imm8
5203 * psrad xmm, imm8
5204 * pslld xmm, imm8
5205 * psrlq xmm, imm8
5206 * psllq xmm, imm8
5207 *
5208 */
5209FNIEMOP_DEF_2(iemOpCommonSse2_Shift_Imm, uint8_t, bRm, FNIEMAIMPLMEDIAPSHIFTU128, pfnU128)
5210{
5211 if (IEM_IS_MODRM_REG_MODE(bRm))
5212 {
5213 /*
5214 * Register, immediate.
5215 */
5216 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
5217 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5218
5219 IEM_MC_BEGIN(2, 0);
5220 IEM_MC_ARG(PRTUINT128U, pDst, 0);
5221 IEM_MC_ARG_CONST(uint8_t, bShiftArg, /*=*/ bImm, 1);
5222 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
5223 IEM_MC_PREPARE_SSE_USAGE();
5224 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
5225 IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, bShiftArg);
5226 IEM_MC_ADVANCE_RIP();
5227 IEM_MC_END();
5228 }
5229 else
5230 {
5231 /*
5232 * Register, memory.
5233 */
5234 /// @todo Caller already enforced register mode?!
5235 }
5236 return VINF_SUCCESS;
5237}
5238
5239
/** Opcode 0x0f 0x71 11/2 - psrlw Nq, Ib
 * Group 12 shift-by-immediate handlers: each forwards to the common MMX or
 * SSE2 Shift_Imm worker with the matching iemAImpl function. */
FNIEMOPRM_DEF(iemOp_Grp12_psrlw_Nq_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSRLW, psrlw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrlw_imm_u64);
}


/** Opcode 0x66 0x0f 0x71 11/2. */
FNIEMOPRM_DEF(iemOp_Grp12_psrlw_Ux_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSRLW, psrlw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrlw_imm_u128);
}


/** Opcode 0x0f 0x71 11/4. */
FNIEMOPRM_DEF(iemOp_Grp12_psraw_Nq_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSRAW, psraw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psraw_imm_u64);
}


/** Opcode 0x66 0x0f 0x71 11/4. */
FNIEMOPRM_DEF(iemOp_Grp12_psraw_Ux_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSRAW, psraw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psraw_imm_u128);
}


/** Opcode 0x0f 0x71 11/6. */
FNIEMOPRM_DEF(iemOp_Grp12_psllw_Nq_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSLLW, psllw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psllw_imm_u64);
}


/** Opcode 0x66 0x0f 0x71 11/6. */
FNIEMOPRM_DEF(iemOp_Grp12_psllw_Ux_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSLLW, psllw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psllw_imm_u128);
}
5286
5287
/**
 * Group 12 jump table for register variant.
 *
 * Indexed by IEM_GET_MODRM_REG_8(bRm) * 4 + pVCpu->iem.s.idxPrefix; the four
 * columns per /reg row correspond to the mandatory prefix: none (MMX), 0x66
 * (SSE2), and two invalid columns (presumably 0xF3 / 0xF2 - matches the
 * idxPrefix convention used by the group dispatchers).
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup12RegReg[] =
{
    /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /2 */ iemOp_Grp12_psrlw_Nq_Ib, iemOp_Grp12_psrlw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /4 */ iemOp_Grp12_psraw_Nq_Ib, iemOp_Grp12_psraw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /6 */ iemOp_Grp12_psllw_Nq_Ib, iemOp_Grp12_psllw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
};
AssertCompile(RT_ELEMENTS(g_apfnGroup12RegReg) == 8*4);
5303
5304
/** Opcode 0x0f 0x71 - Group 12 (packed word shifts by immediate). */
FNIEMOP_DEF(iemOp_Grp12)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
        /* register, register - dispatch on /reg and the mandatory prefix. */
        return FNIEMOP_CALL_1(g_apfnGroup12RegReg[  IEM_GET_MODRM_REG_8(bRm) * 4
                                                  + pVCpu->iem.s.idxPrefix], bRm);
    /* Memory forms are all invalid; the handler still consumes the trailing imm8. */
    return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
}
5315
5316
/** Opcode 0x0f 0x72 11/2 - psrld Nq, Ib (MMX logical right shift of dwords) */
FNIEMOPRM_DEF(iemOp_Grp13_psrld_Nq_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSRLD, psrld, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrld_imm_u64);
}


/** Opcode 0x66 0x0f 0x72 11/2 - psrld Ux, Ib (SSE2 logical right shift of dwords) */
FNIEMOPRM_DEF(iemOp_Grp13_psrld_Ux_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSRLD, psrld, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrld_imm_u128);
}


/** Opcode 0x0f 0x72 11/4 - psrad Nq, Ib (MMX arithmetic right shift of dwords) */
FNIEMOPRM_DEF(iemOp_Grp13_psrad_Nq_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSRAD, psrad, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrad_imm_u64);
}


/** Opcode 0x66 0x0f 0x72 11/4 - psrad Ux, Ib (SSE2 arithmetic right shift of dwords) */
FNIEMOPRM_DEF(iemOp_Grp13_psrad_Ux_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSRAD, psrad, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrad_imm_u128);
}


/** Opcode 0x0f 0x72 11/6 - pslld Nq, Ib (MMX left shift of dwords) */
FNIEMOPRM_DEF(iemOp_Grp13_pslld_Nq_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSLLD, pslld, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_pslld_imm_u64);
}

/** Opcode 0x66 0x0f 0x72 11/6 - pslld Ux, Ib (SSE2 left shift of dwords) */
FNIEMOPRM_DEF(iemOp_Grp13_pslld_Ux_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSLLD, pslld, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_pslld_imm_u128);
}
5362
5363
/**
 * Group 13 jump table for register variant.
 *
 * Indexed by IEM_GET_MODRM_REG_8(bRm) * 4 + pVCpu->iem.s.idxPrefix; columns
 * per /reg row: no prefix (MMX), 0x66 (SSE2), then two invalid columns
 * (presumably 0xF3 / 0xF2 - matches the idxPrefix convention).
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup13RegReg[] =
{
    /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /2 */ iemOp_Grp13_psrld_Nq_Ib, iemOp_Grp13_psrld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /4 */ iemOp_Grp13_psrad_Nq_Ib, iemOp_Grp13_psrad_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /6 */ iemOp_Grp13_pslld_Nq_Ib, iemOp_Grp13_pslld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
};
AssertCompile(RT_ELEMENTS(g_apfnGroup13RegReg) == 8*4);
5379
/** Opcode 0x0f 0x72 - Group 13 (packed dword shifts by immediate). */
FNIEMOP_DEF(iemOp_Grp13)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
        /* register, register - dispatch on /reg and the mandatory prefix. */
        return FNIEMOP_CALL_1(g_apfnGroup13RegReg[  IEM_GET_MODRM_REG_8(bRm) * 4
                                                  + pVCpu->iem.s.idxPrefix], bRm);
    /* Memory forms are all invalid; the handler still consumes the trailing imm8. */
    return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
}
5390
5391
/** Opcode 0x0f 0x73 11/2 - psrlq Nq, Ib (MMX logical right shift of qword) */
FNIEMOPRM_DEF(iemOp_Grp14_psrlq_Nq_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSRLQ, psrlq, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrlq_imm_u64);
}


/** Opcode 0x66 0x0f 0x73 11/2 - psrlq Ux, Ib (SSE2 logical right shift of qwords) */
FNIEMOPRM_DEF(iemOp_Grp14_psrlq_Ux_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSRLQ, psrlq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrlq_imm_u128);
}


/** Opcode 0x66 0x0f 0x73 11/3 - psrldq Ux, Ib (SSE2 byte-wise right shift, 0x66 only) */
FNIEMOPRM_DEF(iemOp_Grp14_psrldq_Ux_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSRLDQ, psrldq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrldq_imm_u128);
}


/** Opcode 0x0f 0x73 11/6 - psllq Nq, Ib (MMX left shift of qword) */
FNIEMOPRM_DEF(iemOp_Grp14_psllq_Nq_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSLLQ, psllq, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psllq_imm_u64);
}


/** Opcode 0x66 0x0f 0x73 11/6 - psllq Ux, Ib (SSE2 left shift of qwords) */
FNIEMOPRM_DEF(iemOp_Grp14_psllq_Ux_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSLLQ, psllq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psllq_imm_u128);
}


/** Opcode 0x66 0x0f 0x73 11/7 - pslldq Ux, Ib (SSE2 byte-wise left shift, 0x66 only) */
FNIEMOPRM_DEF(iemOp_Grp14_pslldq_Ux_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSLLDQ, pslldq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_pslldq_imm_u128);
}
5438
/**
 * Group 14 jump table for register variant.
 *
 * Indexed by IEM_GET_MODRM_REG_8(bRm) * 4 + pVCpu->iem.s.idxPrefix; columns
 * per /reg row: no prefix (MMX), 0x66 (SSE2), then two invalid columns
 * (presumably 0xF3 / 0xF2 - matches the idxPrefix convention).  Note that
 * /3 (psrldq) and /7 (pslldq) exist only with the 0x66 prefix.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup14RegReg[] =
{
    /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /2 */ iemOp_Grp14_psrlq_Nq_Ib, iemOp_Grp14_psrlq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_psrldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /6 */ iemOp_Grp14_psllq_Nq_Ib, iemOp_Grp14_psllq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_pslldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
};
AssertCompile(RT_ELEMENTS(g_apfnGroup14RegReg) == 8*4);
5454
5455
/** Opcode 0x0f 0x73 - Group 14 (packed qword / whole-register shifts by immediate). */
FNIEMOP_DEF(iemOp_Grp14)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
        /* register, register - dispatch on /reg and the mandatory prefix. */
        return FNIEMOP_CALL_1(g_apfnGroup14RegReg[  IEM_GET_MODRM_REG_8(bRm) * 4
                                                  + pVCpu->iem.s.idxPrefix], bRm);
    /* Memory forms are all invalid; the handler still consumes the trailing imm8. */
    return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
}
5466
5467
/** Opcode 0x0f 0x74 - pcmpeqb Pq, Qq (MMX packed byte equality compare) */
FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PCMPEQB, pcmpeqb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpeqb_u64);
}


/** Opcode 0x66 0x0f 0x74 - pcmpeqb Vx, Wx (SSE2 packed byte equality compare) */
FNIEMOP_DEF(iemOp_pcmpeqb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PCMPEQB, pcmpeqb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpeqb_u128);
}


/* Opcode 0xf3 0x0f 0x74 - invalid */
/* Opcode 0xf2 0x0f 0x74 - invalid */


/** Opcode 0x0f 0x75 - pcmpeqw Pq, Qq (MMX packed word equality compare) */
FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PCMPEQW, pcmpeqw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpeqw_u64);
}


/** Opcode 0x66 0x0f 0x75 - pcmpeqw Vx, Wx (SSE2 packed word equality compare) */
FNIEMOP_DEF(iemOp_pcmpeqw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PCMPEQW, pcmpeqw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpeqw_u128);
}


/* Opcode 0xf3 0x0f 0x75 - invalid */
/* Opcode 0xf2 0x0f 0x75 - invalid */


/** Opcode 0x0f 0x76 - pcmpeqd Pq, Qq (MMX packed dword equality compare) */
FNIEMOP_DEF(iemOp_pcmpeqd_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PCMPEQD, pcmpeqd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpeqd_u64);
}


/** Opcode 0x66 0x0f 0x76 - pcmpeqd Vx, Wx (SSE2 packed dword equality compare) */
FNIEMOP_DEF(iemOp_pcmpeqd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PCMPEQD, pcmpeqd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpeqd_u128);
}
5522
5523
5524/* Opcode 0xf3 0x0f 0x76 - invalid */
5525/* Opcode 0xf2 0x0f 0x76 - invalid */
5526
5527
/** Opcode 0x0f 0x77 - emms (vex has vzeroall and vzeroupper here) */
FNIEMOP_DEF(iemOp_emms)
{
    IEMOP_MNEMONIC(emms, "emms");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_FROM_MMX_MODE();     /* switch the FPU state back from MMX to x87 mode */
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
5543
5544/* Opcode 0x66 0x0f 0x77 - invalid */
5545/* Opcode 0xf3 0x0f 0x77 - invalid */
5546/* Opcode 0xf2 0x0f 0x77 - invalid */
5547
5548/** Opcode 0x0f 0x78 - VMREAD Ey, Gy */
5549#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_vmread_Ey_Gy)
{
    IEMOP_MNEMONIC(vmread, "vmread Ey,Gy");
    IEMOP_HLP_IN_VMX_OPERATION("vmread", kVmxVDiag_Vmread);
    IEMOP_HLP_VMX_INSTR("vmread", kVmxVDiag_Vmread);
    /* Operand size is forced: 64-bit in long mode, otherwise 32-bit. */
    IEMMODE const enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT ? IEMMODE_64BIT : IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
        if (enmEffOpSize == IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint64_t *, pu64Dst, 0);
            IEM_MC_ARG(uint64_t, u64Enc, 1);
            /* Gy (reg field) supplies the VMCS field encoding; Ey (r/m) receives the value. */
            IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_CIMPL_2(iemCImpl_vmread_reg64, pu64Dst, u64Enc);
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
            IEM_MC_ARG(uint32_t, u32Enc, 1);
            IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_CIMPL_2(iemCImpl_vmread_reg32, pu32Dst, u32Enc);
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Memory, register.
         */
        if (enmEffOpSize == IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint8_t, iEffSeg, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
            IEM_MC_ARG(uint64_t, u64Enc, 2);
            /* Effective address must be calculated before decode-done checks. */
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
            IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
            IEM_MC_CALL_CIMPL_3(iemCImpl_vmread_mem_reg64, iEffSeg, GCPtrVal, u64Enc);
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint8_t, iEffSeg, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
            IEM_MC_ARG(uint32_t, u32Enc, 2);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
            IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
            IEM_MC_CALL_CIMPL_3(iemCImpl_vmread_mem_reg32, iEffSeg, GCPtrVal, u32Enc);
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}
5619#else
5620FNIEMOP_STUB(iemOp_vmread_Ey_Gy);
5621#endif
5622
5623/* Opcode 0x66 0x0f 0x78 - AMD Group 17 */
5624FNIEMOP_STUB(iemOp_AmdGrp17);
5625/* Opcode 0xf3 0x0f 0x78 - invalid */
5626/* Opcode 0xf2 0x0f 0x78 - invalid */
5627
5628/** Opcode 0x0f 0x79 - VMWRITE Gy, Ey */
5629#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_vmwrite_Gy_Ey)
{
    IEMOP_MNEMONIC(vmwrite, "vmwrite Gy,Ey");
    IEMOP_HLP_IN_VMX_OPERATION("vmwrite", kVmxVDiag_Vmwrite);
    IEMOP_HLP_VMX_INSTR("vmwrite", kVmxVDiag_Vmwrite);
    /* Operand size is forced: 64-bit in long mode, otherwise 32-bit. */
    IEMMODE const enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT ? IEMMODE_64BIT : IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
        if (enmEffOpSize == IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint64_t, u64Val, 0);
            IEM_MC_ARG(uint64_t, u64Enc, 1);
            /* Ey (r/m) supplies the value; Gy (reg field) the VMCS field encoding. */
            IEM_MC_FETCH_GREG_U64(u64Val, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_CALL_CIMPL_2(iemCImpl_vmwrite_reg, u64Val, u64Enc);
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint32_t, u32Val, 0);
            IEM_MC_ARG(uint32_t, u32Enc, 1);
            IEM_MC_FETCH_GREG_U32(u32Val, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_CALL_CIMPL_2(iemCImpl_vmwrite_reg, u32Val, u32Enc);
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory (the value to write comes from memory).
         */
        if (enmEffOpSize == IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint8_t, iEffSeg, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
            IEM_MC_ARG(uint64_t, u64Enc, 2);
            /* Effective address must be calculated before decode-done checks. */
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
            IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
            IEM_MC_CALL_CIMPL_3(iemCImpl_vmwrite_mem, iEffSeg, GCPtrVal, u64Enc);
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint8_t, iEffSeg, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
            IEM_MC_ARG(uint32_t, u32Enc, 2);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
            IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
            IEM_MC_CALL_CIMPL_3(iemCImpl_vmwrite_mem, iEffSeg, GCPtrVal, u32Enc);
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}
5699#else
5700FNIEMOP_STUB(iemOp_vmwrite_Gy_Ey);
5701#endif
5702/* Opcode 0x66 0x0f 0x79 - invalid */
5703/* Opcode 0xf3 0x0f 0x79 - invalid */
5704/* Opcode 0xf2 0x0f 0x79 - invalid */
5705
5706/* Opcode 0x0f 0x7a - invalid */
5707/* Opcode 0x66 0x0f 0x7a - invalid */
5708/* Opcode 0xf3 0x0f 0x7a - invalid */
5709/* Opcode 0xf2 0x0f 0x7a - invalid */
5710
5711/* Opcode 0x0f 0x7b - invalid */
5712/* Opcode 0x66 0x0f 0x7b - invalid */
5713/* Opcode 0xf3 0x0f 0x7b - invalid */
5714/* Opcode 0xf2 0x0f 0x7b - invalid */
5715
5716/* Opcode 0x0f 0x7c - invalid */
5717
5718
/** Opcode 0x66 0x0f 0x7c - haddpd Vpd, Wpd (SSE3 horizontal add, packed double) */
FNIEMOP_DEF(iemOp_haddpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, HADDPD, haddpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_haddpd_u128);
}


/* Opcode 0xf3 0x0f 0x7c - invalid */


/** Opcode 0xf2 0x0f 0x7c - haddps Vps, Wps (SSE3 horizontal add, packed single) */
FNIEMOP_DEF(iemOp_haddps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, HADDPS, haddps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_haddps_u128);
}


/* Opcode 0x0f 0x7d - invalid */


/** Opcode 0x66 0x0f 0x7d - hsubpd Vpd, Wpd (SSE3 horizontal subtract, packed double) */
FNIEMOP_DEF(iemOp_hsubpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, HSUBPD, hsubpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_hsubpd_u128);
}


/* Opcode 0xf3 0x0f 0x7d - invalid */


/** Opcode 0xf2 0x0f 0x7d - hsubps Vps, Wps (SSE3 horizontal subtract, packed single) */
FNIEMOP_DEF(iemOp_hsubps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, HSUBPS, hsubps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_hsubps_u128);
}
5758
5759
/** Opcode 0x0f 0x7e - movd_q Ey, Pd (store from MMX register; movq with REX.W, movd otherwise) */
FNIEMOP_DEF(iemOp_movd_q_Ey_Pd)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        /**
         * @opcode      0x7e
         * @opcodesub   rex.w=1
         * @oppfx       none
         * @opcpuid     mmx
         * @opgroup     og_mmx_datamove
         * @opxcpttype  5
         * @optest      64-bit / op1=1 op2=2   -> op1=2   ftw=0xff
         * @optest      64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
         */
        IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Pq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* greg64, MMX */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

            IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);
            IEM_MC_FPU_TO_MMX_MODE();   /* MMX instructions put the x87 unit into MMX mode */

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            /* [mem64], MMX */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

            IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
            IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /**
         * @opdone
         * @opcode      0x7e
         * @opcodesub   rex.w=0
         * @oppfx       none
         * @opcpuid     mmx
         * @opgroup     og_mmx_datamove
         * @opxcpttype  5
         * @opfunction  iemOp_movd_q_Pd_Ey
         * @optest      op1=1 op2=2   -> op1=2   ftw=0xff
         * @optest      op1=0 op2=-42 -> op1=-42 ftw=0xff
         */
        IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Pd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* greg32, MMX - stores the low dword of the MMX register */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

            IEM_MC_FETCH_MREG_U32(u32Tmp, IEM_GET_MODRM_REG_8(bRm));
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            /* [mem32], MMX */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

            IEM_MC_FETCH_MREG_U32(u32Tmp, IEM_GET_MODRM_REG_8(bRm));
            IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;

}
5869
5870
/* Opcode 0x66 0x0f 0x7e - store from XMM register; movq with REX.W, movd otherwise. */
FNIEMOP_DEF(iemOp_movd_q_Ey_Vy)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        /**
         * @opcode      0x7e
         * @opcodesub   rex.w=1
         * @oppfx       0x66
         * @opcpuid     sse2
         * @opgroup     og_sse2_simdint_datamove
         * @opxcpttype  5
         * @optest      64-bit / op1=1 op2=2   -> op1=2
         * @optest      64-bit / op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* greg64, XMM - stores the low qword */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();  /* guest SSE state is only read here */

            IEM_MC_FETCH_XREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            /* [mem64], XMM */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

            IEM_MC_FETCH_XREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /**
         * @opdone
         * @opcode      0x7e
         * @opcodesub   rex.w=0
         * @oppfx       0x66
         * @opcpuid     sse2
         * @opgroup     og_sse2_simdint_datamove
         * @opxcpttype  5
         * @opfunction  iemOp_movd_q_Vy_Ey
         * @optest      op1=1 op2=2   -> op1=2
         * @optest      op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Vd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* greg32, XMM - stores the low dword */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

            IEM_MC_FETCH_XREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            /* [mem32], XMM */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

            IEM_MC_FETCH_XREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;

}
5975
/**
 * @opcode      0x7e
 * @oppfx       0xf3
 * @opcpuid     sse2
 * @opgroup     og_sse2_pcksclr_datamove
 * @opxcpttype  none
 * @optest      op1=1 op2=2   -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movq_Vq_Wq)
{
    IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        /* The high qword of the destination is zeroed (hence the _ZX_ store). */
        IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.  (Load form: XMM destination, 64-bit memory source.)
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6029
6030/* Opcode 0xf2 0x0f 0x7e - invalid */
6031
6032
/** Opcode 0x0f 0x7f - movq Qq, Pq (store MMX register to register/memory) */
FNIEMOP_DEF(iemOp_movq_Qq_Pq)
{
    IEMOP_MNEMONIC2(MR, MOVQ, movq, Qq_WO, Pq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_IGNORES_REXW);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, u64Tmp);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
        IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_RM_8(bRm), u64Tmp);
        IEM_MC_FPU_TO_MMX_MODE();   /* MMX instructions put the x87 unit into MMX mode */
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, Register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, u64Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6079
/** Opcode 0x66 0x0f 0x7f - movdqa Wx,Vx (aligned 128-bit store) */
FNIEMOP_DEF(iemOp_movdqa_Wx_Vx)
{
    IEMOP_MNEMONIC2(MR, MOVDQA, movdqa, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
                              IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.  (Store form: guest XMM state is only read.)
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, u128Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
        /* Alignment-checked store - see IEM_MC_STORE_MEM_U128_ALIGN_SSE. */
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6121
/** Opcode 0xf3 0x0f 0x7f - movdqu Wx,Vx (unaligned 128-bit store) */
FNIEMOP_DEF(iemOp_movdqu_Wx_Vx)
{
    IEMOP_MNEMONIC2(MR, MOVDQU, movdqu, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
                              IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.  (Store form; no alignment restriction, unlike movdqa.)
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, u128Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6163
6164/* Opcode 0xf2 0x0f 0x7f - invalid */
6165
6166
6167
/** Opcode 0x0f 0x80 - jo Jv (jump near if OF is set). */
FNIEMOP_DEF(iemOp_jo_Jv)
{
    IEMOP_MNEMONIC(jo_Jv, "jo Jv");
    IEMOP_HLP_MIN_386();    /* rel16/rel32 Jcc forms require a 386 or later */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6202
6203
/** Opcode 0x0f 0x81 - jno Jv (jump near if OF is clear).
 * Note: the IF/ELSE arms are inverted relative to jo - the taken path is
 * the ELSE branch. */
FNIEMOP_DEF(iemOp_jno_Jv)
{
    IEMOP_MNEMONIC(jno_Jv, "jno Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6238
6239
/** Opcode 0x0f 0x82 - jc/jb/jnae Jv (jump near if CF is set). */
FNIEMOP_DEF(iemOp_jc_Jv)
{
    IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6274
6275
/** Opcode 0x0f 0x83 - jnc/jnb/jae Jv: jump near if the carry flag (CF) is clear. */
FNIEMOP_DEF(iemOp_jnc_Jv)
{
    IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
    IEMOP_HLP_MIN_386();               /* Jcc with 16/32-bit displacement requires a 386+. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* In 64-bit mode the operand size defaults to 64-bit. */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        /* Inverted sense: CF set means the branch is NOT taken. */
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit and 64-bit operand sizes: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6310
6311
/** Opcode 0x0f 0x84 - je/jz Jv: jump near if the zero flag (ZF) is set. */
FNIEMOP_DEF(iemOp_je_Jv)
{
    IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
    IEMOP_HLP_MIN_386();               /* Jcc with 16/32-bit displacement requires a 386+. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* In 64-bit mode the operand size defaults to 64-bit. */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        /* Branch taken when ZF=1. */
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit and 64-bit operand sizes: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6346
6347
/** Opcode 0x0f 0x85 - jne/jnz Jv: jump near if the zero flag (ZF) is clear. */
FNIEMOP_DEF(iemOp_jne_Jv)
{
    IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
    IEMOP_HLP_MIN_386();               /* Jcc with 16/32-bit displacement requires a 386+. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* In 64-bit mode the operand size defaults to 64-bit. */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        /* Inverted sense: ZF set means the branch is NOT taken. */
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit and 64-bit operand sizes: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6382
6383
/** Opcode 0x0f 0x86 - jbe/jna Jv: jump near if CF or ZF is set (below or equal). */
FNIEMOP_DEF(iemOp_jbe_Jv)
{
    IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
    IEMOP_HLP_MIN_386();               /* Jcc with 16/32-bit displacement requires a 386+. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* In 64-bit mode the operand size defaults to 64-bit. */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        /* Branch taken when CF=1 or ZF=1. */
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit and 64-bit operand sizes: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6418
6419
/** Opcode 0x0f 0x87 - jnbe/ja Jv: jump near if both CF and ZF are clear (above). */
FNIEMOP_DEF(iemOp_jnbe_Jv)
{
    IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
    IEMOP_HLP_MIN_386();               /* Jcc with 16/32-bit displacement requires a 386+. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* In 64-bit mode the operand size defaults to 64-bit. */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        /* Inverted sense: CF or ZF set means the branch is NOT taken. */
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit and 64-bit operand sizes: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6454
6455
/** Opcode 0x0f 0x88 - js Jv: jump near if the sign flag (SF) is set. */
FNIEMOP_DEF(iemOp_js_Jv)
{
    IEMOP_MNEMONIC(js_Jv, "js Jv");
    IEMOP_HLP_MIN_386();               /* Jcc with 16/32-bit displacement requires a 386+. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* In 64-bit mode the operand size defaults to 64-bit. */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        /* Branch taken when SF=1. */
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit and 64-bit operand sizes: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6490
6491
/** Opcode 0x0f 0x89 - jns Jv: jump near if the sign flag (SF) is clear. */
FNIEMOP_DEF(iemOp_jns_Jv)
{
    IEMOP_MNEMONIC(jns_Jv, "jns Jv");
    IEMOP_HLP_MIN_386();               /* Jcc with 16/32-bit displacement requires a 386+. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* In 64-bit mode the operand size defaults to 64-bit. */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        /* Inverted sense: SF set means the branch is NOT taken. */
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit and 64-bit operand sizes: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6526
6527
/** Opcode 0x0f 0x8a - jp Jv: jump near if the parity flag (PF) is set. */
FNIEMOP_DEF(iemOp_jp_Jv)
{
    IEMOP_MNEMONIC(jp_Jv, "jp Jv");
    IEMOP_HLP_MIN_386();               /* Jcc with 16/32-bit displacement requires a 386+. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* In 64-bit mode the operand size defaults to 64-bit. */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        /* Branch taken when PF=1. */
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit and 64-bit operand sizes: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6562
6563
/** Opcode 0x0f 0x8b - jnp Jv: jump near if the parity flag (PF) is clear. */
FNIEMOP_DEF(iemOp_jnp_Jv)
{
    IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
    IEMOP_HLP_MIN_386();               /* Jcc with 16/32-bit displacement requires a 386+. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* In 64-bit mode the operand size defaults to 64-bit. */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        /* Inverted sense: PF set means the branch is NOT taken. */
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit and 64-bit operand sizes: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6598
6599
/** Opcode 0x0f 0x8c - jl/jnge Jv: jump near if SF != OF (signed less than). */
FNIEMOP_DEF(iemOp_jl_Jv)
{
    IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
    IEMOP_HLP_MIN_386();               /* Jcc with 16/32-bit displacement requires a 386+. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* In 64-bit mode the operand size defaults to 64-bit. */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        /* Branch taken when SF and OF differ. */
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit and 64-bit operand sizes: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6634
6635
/** Opcode 0x0f 0x8d - jnl/jge Jv: jump near if SF == OF (signed greater or equal). */
FNIEMOP_DEF(iemOp_jnl_Jv)
{
    IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
    IEMOP_HLP_MIN_386();               /* Jcc with 16/32-bit displacement requires a 386+. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* In 64-bit mode the operand size defaults to 64-bit. */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        /* Inverted sense: SF != OF means the branch is NOT taken. */
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit and 64-bit operand sizes: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6670
6671
/** Opcode 0x0f 0x8e - jle/jng Jv: jump near if ZF is set or SF != OF (signed less or equal). */
FNIEMOP_DEF(iemOp_jle_Jv)
{
    IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
    IEMOP_HLP_MIN_386();               /* Jcc with 16/32-bit displacement requires a 386+. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* In 64-bit mode the operand size defaults to 64-bit. */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        /* Branch taken when ZF=1 or SF != OF. */
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit and 64-bit operand sizes: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6706
6707
/** Opcode 0x0f 0x8f - jnle/jg Jv: jump near if ZF is clear and SF == OF (signed greater). */
FNIEMOP_DEF(iemOp_jnle_Jv)
{
    IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
    IEMOP_HLP_MIN_386();               /* Jcc with 16/32-bit displacement requires a 386+. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* In 64-bit mode the operand size defaults to 64-bit. */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        /* Inverted sense: ZF set or SF != OF means the branch is NOT taken. */
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit and 64-bit operand sizes: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6742
6743
/** Opcode 0x0f 0x90 - seto Eb: set byte to 1 if OF is set, to 0 otherwise. */
FNIEMOP_DEF(iemOp_seto_Eb)
{
    IEMOP_MNEMONIC(seto_Eb, "seto Eb");
    IEMOP_HLP_MIN_386(); /* SETcc requires a 386+. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target: store the 0/1 result in the byte register selected by r/m. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target: the effective address is calculated before decoding completes. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6784
6785
/** Opcode 0x0f 0x91 - setno Eb: set byte to 1 if OF is clear, to 0 otherwise. */
FNIEMOP_DEF(iemOp_setno_Eb)
{
    IEMOP_MNEMONIC(setno_Eb, "setno Eb");
    IEMOP_HLP_MIN_386(); /* SETcc requires a 386+. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target - inverted sense: OF set stores 0, clear stores 1. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target: the effective address is calculated before decoding completes. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6826
6827
/** Opcode 0x0f 0x92 - setc Eb: set byte to 1 if CF is set, to 0 otherwise. */
FNIEMOP_DEF(iemOp_setc_Eb)
{
    IEMOP_MNEMONIC(setc_Eb, "setc Eb");
    IEMOP_HLP_MIN_386(); /* SETcc requires a 386+. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target: store the 0/1 result in the byte register selected by r/m. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target: the effective address is calculated before decoding completes. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6868
6869
/** Opcode 0x0f 0x93 - setnc Eb: set byte to 1 if CF is clear, to 0 otherwise. */
FNIEMOP_DEF(iemOp_setnc_Eb)
{
    IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
    IEMOP_HLP_MIN_386(); /* SETcc requires a 386+. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target - inverted sense: CF set stores 0, clear stores 1. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target: the effective address is calculated before decoding completes. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6910
6911
/** Opcode 0x0f 0x94 - sete Eb: set byte to 1 if ZF is set, to 0 otherwise. */
FNIEMOP_DEF(iemOp_sete_Eb)
{
    IEMOP_MNEMONIC(sete_Eb, "sete Eb");
    IEMOP_HLP_MIN_386(); /* SETcc requires a 386+. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target: store the 0/1 result in the byte register selected by r/m. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target: the effective address is calculated before decoding completes. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6952
6953
/** Opcode 0x0f 0x95 - setne Eb: set byte to 1 if ZF is clear, to 0 otherwise. */
FNIEMOP_DEF(iemOp_setne_Eb)
{
    IEMOP_MNEMONIC(setne_Eb, "setne Eb");
    IEMOP_HLP_MIN_386(); /* SETcc requires a 386+. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target - inverted sense: ZF set stores 0, clear stores 1. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target: the effective address is calculated before decoding completes. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6994
6995
/** Opcode 0x0f 0x96 - setbe Eb: set byte to 1 if CF or ZF is set (below or equal), to 0 otherwise. */
FNIEMOP_DEF(iemOp_setbe_Eb)
{
    IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
    IEMOP_HLP_MIN_386(); /* SETcc requires a 386+. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target: store the 0/1 result in the byte register selected by r/m. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target: the effective address is calculated before decoding completes. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
7036
7037
/** Opcode 0x0f 0x97 - setnbe Eb: set byte to 1 if both CF and ZF are clear (above), to 0 otherwise. */
FNIEMOP_DEF(iemOp_setnbe_Eb)
{
    IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
    IEMOP_HLP_MIN_386(); /* SETcc requires a 386+. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target - inverted sense: CF or ZF set stores 0, both clear stores 1. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target: the effective address is calculated before decoding completes. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
7078
7079
/** Opcode 0x0f 0x98 - sets Eb: set byte to 1 if SF is set, to 0 otherwise. */
FNIEMOP_DEF(iemOp_sets_Eb)
{
    IEMOP_MNEMONIC(sets_Eb, "sets Eb");
    IEMOP_HLP_MIN_386(); /* SETcc requires a 386+. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target: store the 0/1 result in the byte register selected by r/m. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target: the effective address is calculated before decoding completes. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
7120
7121
/** Opcode 0x0f 0x99 - setns Eb: set byte to 1 if SF is clear, to 0 otherwise. */
FNIEMOP_DEF(iemOp_setns_Eb)
{
    IEMOP_MNEMONIC(setns_Eb, "setns Eb");
    IEMOP_HLP_MIN_386(); /* SETcc requires a 386+. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target - inverted sense: SF set stores 0, clear stores 1. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target: the effective address is calculated before decoding completes. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
7162
7163
/** Opcode 0x0f 0x9a - setp Eb: set byte to 1 if PF is set, to 0 otherwise. */
FNIEMOP_DEF(iemOp_setp_Eb)
{
    IEMOP_MNEMONIC(setp_Eb, "setp Eb");
    IEMOP_HLP_MIN_386(); /* SETcc requires a 386+. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target: store the 0/1 result in the byte register selected by r/m. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target: the effective address is calculated before decoding completes. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
7204
7205
/** Opcode 0x0f 0x9b - setnp Eb: set byte to 1 if PF is clear, to 0 otherwise. */
FNIEMOP_DEF(iemOp_setnp_Eb)
{
    IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
    IEMOP_HLP_MIN_386(); /* SETcc requires a 386+. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target - inverted sense: PF set stores 0, clear stores 1. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target: the effective address is calculated before decoding completes. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
7246
7247
/** Opcode 0x0f 0x9c - setl Eb: set byte to 1 if SF != OF (signed less than), to 0 otherwise. */
FNIEMOP_DEF(iemOp_setl_Eb)
{
    IEMOP_MNEMONIC(setl_Eb, "setl Eb");
    IEMOP_HLP_MIN_386(); /* SETcc requires a 386+. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target: store the 0/1 result in the byte register selected by r/m. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target: the effective address is calculated before decoding completes. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
7288
7289
/** Opcode 0x0f 0x9d - setnl Eb: set byte to 1 if SF == OF (signed greater or equal), to 0 otherwise. */
FNIEMOP_DEF(iemOp_setnl_Eb)
{
    IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
    IEMOP_HLP_MIN_386(); /* SETcc requires a 386+. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target - inverted sense: SF != OF stores 0, SF == OF stores 1. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target: the effective address is calculated before decoding completes. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
7330
7331
/** Opcode 0x0f 0x9e - setle Eb: set byte to 1 if ZF is set or SF != OF (signed less or equal), to 0 otherwise. */
FNIEMOP_DEF(iemOp_setle_Eb)
{
    IEMOP_MNEMONIC(setle_Eb, "setle Eb");
    IEMOP_HLP_MIN_386(); /* SETcc requires a 386+. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target: store the 0/1 result in the byte register selected by r/m. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target: the effective address is calculated before decoding completes. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
7372
7373
/** Opcode 0x0f 0x9f - SETNLE/SETG r/m8.
 * Inverse of SETLE: stores 0 when ZF=1 or SF!=OF, otherwise 1 (signed
 * 'greater').  Same condition macro as SETLE with swapped store values. */
FNIEMOP_DEF(iemOp_setnle_Eb)
{
    IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        /* Condition is 'less or equal'; NLE therefore stores 0 on true. */
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
7414
7415
/**
 * Common 'push segment-register' helper.
 *
 * Pushes the 16-bit selector value of @a iReg, zero-extended to the current
 * effective operand size.  In 64-bit mode the default operand size is forced
 * to 64-bit and only FS/GS pushes are expected (see the assertion).
 *
 * @param   iReg    The segment register index (X86_SREG_XXX).
 */
FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Only 0x0f-prefixed FS/GS pushes are valid in long mode. */
    Assert(iReg < X86_SREG_FS || pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Value);
            IEM_MC_FETCH_SREG_U16(u16Value, iReg);
            IEM_MC_PUSH_U16(u16Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
            /* Special 32-bit push variant for segment registers. */
            IEM_MC_PUSH_U32_SREG(u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
            IEM_MC_PUSH_U64(u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
7457
7458
/** Opcode 0x0f 0xa0 - PUSH FS.
 * NOTE(review): IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX is invoked both here
 * and inside iemOpCommonPushSReg - looks redundant but harmless; confirm. */
FNIEMOP_DEF(iemOp_push_fs)
{
    IEMOP_MNEMONIC(push_fs, "push fs");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
}
7467
7468
/** Opcode 0x0f 0xa1 - POP FS.
 * Deferred to the C implementation since segment loads have complex
 * checks/side effects. */
FNIEMOP_DEF(iemOp_pop_fs)
{
    IEMOP_MNEMONIC(pop_fs, "pop fs");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
}
7477
7478
/** Opcode 0x0f 0xa2 - CPUID.
 * Entirely handled by the C implementation. */
FNIEMOP_DEF(iemOp_cpuid)
{
    IEMOP_MNEMONIC(cpuid, "cpuid");
    IEMOP_HLP_MIN_486(); /* not all 486es. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
}
7487
7488
/**
 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
 * iemOp_bts_Ev_Gv.
 *
 * Register form: the bit offset is masked to the operand width (0xf/0x1f/0x3f).
 * Memory form: the full (signed) bit offset in the source register selects a
 * word/dword/qword relative to the effective address, so the address is
 * adjusted by (offset >> log2(width)) * (width/8) bytes before the masked
 * in-unit bit index is applied.
 *
 * @param   pImpl   Size-dispatch table; pfnLockedU16 is NULL for BT (read-only)
 *                  and non-NULL for the writing variants (BTS/BTR/BTC).
 */
FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register destination. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_AND_LOCAL_U16(u16Src, 0xf); /* bit offset modulo 16 */
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_AND_LOCAL_U32(u32Src, 0x1f); /* bit offset modulo 32 */
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_AND_LOCAL_U64(u64Src, 0x3f); /* bit offset modulo 64 */
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory destination. */

        /* BT only reads, the others read-modify-write (pfnLockedU16 != NULL). */
        uint32_t fAccess;
        if (pImpl->pfnLockedU16)
            fAccess = IEM_ACCESS_DATA_RW;
        else /* BT */
            fAccess = IEM_ACCESS_DATA_R;

        /** @todo test negative bit offsets! */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int16_t, i16AddrAdj);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_ASSIGN(i16AddrAdj, u16Src);
                IEM_MC_AND_ARG_U16(u16Src, 0x0f);
                /* word index = offset >> 4, byte adjust = index * 2 (signed). */
                IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
                IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1);
                IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
                IEM_MC_FETCH_EFLAGS(EFlags);

                IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int32_t, i32AddrAdj);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_ASSIGN(i32AddrAdj, u32Src);
                IEM_MC_AND_ARG_U32(u32Src, 0x1f);
                /* dword index = offset >> 5, byte adjust = index * 4 (signed). */
                IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
                IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
                IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
                IEM_MC_FETCH_EFLAGS(EFlags);

                IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int64_t, i64AddrAdj);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_ASSIGN(i64AddrAdj, u64Src);
                IEM_MC_AND_ARG_U64(u64Src, 0x3f);
                /* qword index = offset >> 6, byte adjust = index * 8 (signed). */
                IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
                IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
                IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
                IEM_MC_FETCH_EFLAGS(EFlags);

                IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
7672
7673
/** Opcode 0x0f 0xa3 - BT Ev,Gv (bit test, read-only; CF = tested bit). */
FNIEMOP_DEF(iemOp_bt_Ev_Gv)
{
    IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
}
7681
7682
/**
 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
 *
 * Double-precision shift with an immediate shift count.  Note the decode
 * order difference: in the register form the immediate is fetched before the
 * effective-address work is skipped; in the memory form the immediate is
 * fetched *after* IEM_MC_CALC_RM_EFF_ADDR (with cbImm=1 so the displacement
 * is sized correctly).
 *
 * @param   pImpl   Size-dispatch table with the shld/shrd assembly workers.
 */
FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* cbImm=1: imm8 follows */
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* cbImm=1: imm8 follows */
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* cbImm=1: imm8 follows */
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
7827
7828
/**
 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
 *
 * Double-precision shift with the count taken from CL (fetched as the low
 * byte of xCX).  Memory destination is always mapped read-write; no LOCK
 * prefix is permitted.
 *
 * @param   pImpl   Size-dispatch table with the shld/shrd assembly workers.
 */
FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* count = CL */
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* count = CL */
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* count = CL */
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* count = CL */
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* count = CL */
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* count = CL */
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
7972
7973
7974
/** Opcode 0x0f 0xa4 - SHLD Ev,Gv,Ib (double-precision shift left, imm8 count). */
FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
{
    IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
    IEMOP_HLP_MIN_386();
    /* EFLAGS behaviour differs between CPUs; select the matching worker table. */
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shld_eflags));
}
7982
7983
/** Opcode 0x0f 0xa5 - SHLD Ev,Gv,CL (double-precision shift left, count in CL). */
FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
{
    IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
    IEMOP_HLP_MIN_386();
    /* EFLAGS behaviour differs between CPUs; select the matching worker table. */
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shld_eflags));
}
7991
7992
/** Opcode 0x0f 0xa8 - PUSH GS.
 * NOTE(review): IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX is invoked both here
 * and inside iemOpCommonPushSReg - looks redundant but harmless; confirm. */
FNIEMOP_DEF(iemOp_push_gs)
{
    IEMOP_MNEMONIC(push_gs, "push gs");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
}
8001
8002
/** Opcode 0x0f 0xa9 - POP GS.
 * Deferred to the C implementation since segment loads have complex
 * checks/side effects. */
FNIEMOP_DEF(iemOp_pop_gs)
{
    IEMOP_MNEMONIC(pop_gs, "pop gs");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
}
8011
8012
/** Opcode 0x0f 0xaa - RSM (resume from system management mode).
 * Entirely handled by the C implementation. */
FNIEMOP_DEF(iemOp_rsm)
{
    IEMOP_MNEMONIC0(FIXED, RSM, rsm, DISOPTYPE_HARMLESS, 0);
    IEMOP_HLP_MIN_386(); /* 386SL and later. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rsm);
}
8021
8022
8023
/** Opcode 0x0f 0xab - BTS Ev,Gv (bit test and set; lockable, see common worker). */
FNIEMOP_DEF(iemOp_bts_Ev_Gv)
{
    IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
}
8031
8032
/** Opcode 0x0f 0xac - SHRD Ev,Gv,Ib (double-precision shift right, imm8 count). */
FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
{
    IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
    IEMOP_HLP_MIN_386();
    /* EFLAGS behaviour differs between CPUs; select the matching worker table. */
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shrd_eflags));
}
8040
8041
/** Opcode 0x0f 0xad - SHRD Ev,Gv,CL (double-precision shift right, count in CL). */
FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
{
    IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
    IEMOP_HLP_MIN_386();
    /* EFLAGS behaviour differs between CPUs; select the matching worker table. */
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shrd_eflags));
}
8049
8050
/** Opcode 0x0f 0xae mem/0 - FXSAVE m512.
 * Raises \#UD when FXSAVE/FXRSTOR is not exposed to the guest; the actual
 * state store is done in iemCImpl_fxsave.  State is actualized FOR_READ
 * since the instruction only reads the FPU/SSE state. */
FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fxsave, "fxsave m512");
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
    IEM_MC_END();
    return VINF_SUCCESS;
}
8070
8071
/** Opcode 0x0f 0xae mem/1 - FXRSTOR m512.
 * Raises \#UD when FXSAVE/FXRSTOR is not exposed to the guest.  State is
 * actualized FOR_CHANGE because the instruction overwrites the FPU/SSE
 * state (contrast with fxsave's FOR_READ). */
FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
    IEM_MC_END();
    return VINF_SUCCESS;
}
8091
8092
/**
 * @opmaps grp15
 * @opcode !11/2
 * @oppfx none
 * @opcpuid sse
 * @opgroup og_sse_mxcsrsm
 * @opxcpttype 5
 * @optest op1=0 -> mxcsr=0
 * @optest op1=0x2083 -> mxcsr=0x2083
 * @optest op1=0xfffffffe -> value.xcpt=0xd
 * @optest op1=0x2083 cr0|=ts -> value.xcpt=0x7
 * @optest op1=0x2083 cr0|=em -> value.xcpt=0x6
 * @optest op1=0x2083 cr0|=mp -> mxcsr=0x2083
 * @optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
 * @optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
 * @optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
 * @optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
 * @optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
 *
 * LDMXCSR m32 - loads MXCSR from memory (done in iemCImpl_ldmxcsr).
 * NOTE(review): the instruction *writes* MXCSR yet the state is actualized
 * with ..._FOR_READ; verify whether ..._FOR_CHANGE is needed here (compare
 * fxrstor above) or whether the CIMPL handles the dirtying itself.
 */
FNIEMOP_DEF_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm)
{
    IEMOP_MNEMONIC1(M_MEM, LDMXCSR, ldmxcsr, Md_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
    IEM_MC_END();
    return VINF_SUCCESS;
}
8129
8130
/**
 * @opmaps grp15
 * @opcode !11/3
 * @oppfx none
 * @opcpuid sse
 * @opgroup og_sse_mxcsrsm
 * @opxcpttype 5
 * @optest mxcsr=0 -> op1=0
 * @optest mxcsr=0x2083 -> op1=0x2083
 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
 * @optest mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086
 * @optest mxcsr=0x2087 cr4&~=osfxsr -> value.xcpt=0x6
 * @optest mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
 * @optest mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
 * @optest mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
 * @optest mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
 *
 * STMXCSR m32 - stores MXCSR to memory; read-only w.r.t. the SSE state,
 * hence the ..._FOR_READ actualization.
 */
FNIEMOP_DEF_1(iemOp_Grp15_stmxcsr, uint8_t, bRm)
{
    IEMOP_MNEMONIC1(M_MEM, STMXCSR, stmxcsr, Md_WO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_stmxcsr, iEffSeg, GCPtrEff);
    IEM_MC_END();
    return VINF_SUCCESS;
}
8166
8167
/**
 * @opmaps grp15
 * @opcode !11/4
 * @oppfx none
 * @opcpuid xsave
 * @opgroup og_system
 * @opxcpttype none
 *
 * XSAVE mem - saves enabled extended state components; only reads the
 * FPU/SSE state (..._FOR_READ), the heavy lifting is in iemCImpl_xsave.
 */
FNIEMOP_DEF_1(iemOp_Grp15_xsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC1(M_MEM, XSAVE, xsave, M_RW, DISOPTYPE_HARMLESS, 0);
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_xsave, iEffSeg, GCPtrEff, enmEffOpSize);
    IEM_MC_END();
    return VINF_SUCCESS;
}
8194
8195
8196/**
8197 * @opmaps grp15
8198 * @opcode !11/5
8199 * @oppfx none
8200 * @opcpuid xsave
8201 * @opgroup og_system
8202 * @opxcpttype none
8203 */
8204FNIEMOP_DEF_1(iemOp_Grp15_xrstor, uint8_t, bRm)
8205{
8206 IEMOP_MNEMONIC1(M_MEM, XRSTOR, xrstor, M_RO, DISOPTYPE_HARMLESS, 0);
8207 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
8208 return IEMOP_RAISE_INVALID_OPCODE();
8209
8210 IEM_MC_BEGIN(3, 0);
8211 IEM_MC_ARG(uint8_t, iEffSeg, 0);
8212 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
8213 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
8214 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
8215 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8216 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
8217 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
8218 IEM_MC_CALL_CIMPL_3(iemCImpl_xrstor, iEffSeg, GCPtrEff, enmEffOpSize);
8219 IEM_MC_END();
8220 return VINF_SUCCESS;
8221}
8222
/** Opcode 0x0f 0xae mem/6 - XSAVEOPT.
 * Not implemented; stubbed out (FNIEMOP_UD_STUB_1, presumably raising \#UD). */
FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);
8225
/**
 * @opmaps grp15
 * @opcode !11/7
 * @oppfx none
 * @opcpuid clfsh
 * @opgroup og_cachectl
 * @optest op1=1 ->
 *
 * CLFLUSH m8 - flushes the cache line containing the byte at the effective
 * address; shares iemCImpl_clflush_clflushopt with CLFLUSHOPT below.
 */
FNIEMOP_DEF_1(iemOp_Grp15_clflush, uint8_t, bRm)
{
    IEMOP_MNEMONIC1(M_MEM, CLFLUSH, clflush, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlush)
        return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);

    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
    IEM_MC_END();
    return VINF_SUCCESS;
}
8250
/**
 * @opmaps grp15
 * @opcode !11/7
 * @oppfx 0x66
 * @opcpuid clflushopt
 * @opgroup og_cachectl
 * @optest op1=1 ->
 *
 * CLFLUSHOPT m8 - same encoding slot as CLFLUSH but with a 0x66 prefix;
 * shares the C implementation with CLFLUSH, gated on its own CPUID bit.
 */
FNIEMOP_DEF_1(iemOp_Grp15_clflushopt, uint8_t, bRm)
{
    IEMOP_MNEMONIC1(M_MEM, CLFLUSHOPT, clflushopt, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlushOpt)
        return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);

    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
    IEM_MC_END();
    return VINF_SUCCESS;
}
8275
8276
/** Opcode 0x0f 0xae 11b/5 - LFENCE.
 * Gated on guest SSE2.  On hosts without SSE2 (e.g. non-x86) the generic
 * alternative memory-fence worker is used instead. */
FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
{
    RT_NOREF_PV(bRm);
    IEMOP_MNEMONIC(lfence, "lfence");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(0, 0);
#ifndef RT_ARCH_ARM64
    if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
#endif
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
#ifndef RT_ARCH_ARM64
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
#endif
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8299
8300
/** Opcode 0x0f 0xae 11b/6 - MFENCE.
 * Gated on guest SSE2.  On hosts without SSE2 the generic alternative
 * memory-fence worker is used instead. */
FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
{
    RT_NOREF_PV(bRm);
    IEMOP_MNEMONIC(mfence, "mfence");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(0, 0);
#ifndef RT_ARCH_ARM64
    if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
#endif
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
#ifndef RT_ARCH_ARM64
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
#endif
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8323
8324
/** Opcode 0x0f 0xae 11b/7 - SFENCE.
 * NOTE(review): gated on guest fSse2 here although SFENCE was introduced
 * with SSE1 on real CPUs - verify whether this should test fSse instead
 * (lfence/mfence are genuinely SSE2). */
FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
{
    RT_NOREF_PV(bRm);
    IEMOP_MNEMONIC(sfence, "sfence");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(0, 0);
#ifndef RT_ARCH_ARM64
    if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
#endif
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
#ifndef RT_ARCH_ARM64
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
#endif
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8347
8348
/**
 * Opcode 0xf3 0x0f 0xae 11b/0 - RDFSBASE Ry.
 *
 * Reads the FS segment base into the mod/rm 'rm' general register; 64-bit
 * operand size copies the full base, otherwise only the low 32 bits are
 * stored.  IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT presumably covers the CR4.FSGSBASE
 * and mode checks -- confirmed elsewhere, not in this block.
 */
FNIEMOP_DEF_1(iemOp_Grp15_rdfsbase, uint8_t, bRm)
{
    IEMOP_MNEMONIC(rdfsbase, "rdfsbase Ry");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
    {
        IEM_MC_BEGIN(1, 0);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
        IEM_MC_ARG(uint64_t, u64Dst, 0);
        IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_FS);
        IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Dst);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit operand size: store only the low half of the base. */
        IEM_MC_BEGIN(1, 0);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
        IEM_MC_ARG(uint32_t, u32Dst, 0);
        IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_FS);
        IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Dst);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
8376
8377
/**
 * Opcode 0xf3 0x0f 0xae 11b/1 - RDGSBASE Ry.
 *
 * GS twin of iemOp_Grp15_rdfsbase: reads the GS segment base into the mod/rm
 * 'rm' general register, full 64 bits or only the low 32 depending on the
 * effective operand size.
 */
FNIEMOP_DEF_1(iemOp_Grp15_rdgsbase, uint8_t, bRm)
{
    IEMOP_MNEMONIC(rdgsbase, "rdgsbase Ry");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
    {
        IEM_MC_BEGIN(1, 0);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
        IEM_MC_ARG(uint64_t, u64Dst, 0);
        IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_GS);
        IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Dst);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit operand size: store only the low half of the base. */
        IEM_MC_BEGIN(1, 0);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
        IEM_MC_ARG(uint32_t, u32Dst, 0);
        IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_GS);
        IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Dst);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
8405
8406
/**
 * Opcode 0xf3 0x0f 0xae 11b/2 - WRFSBASE Ry.
 *
 * Writes the mod/rm 'rm' general register into the FS segment base.  The
 * 64-bit form checks canonicality and raises \#GP(0) for non-canonical
 * addresses; the 32-bit form stores the 32-bit value via the U64 store,
 * zero-extending it into the base (no canonical check needed).
 */
FNIEMOP_DEF_1(iemOp_Grp15_wrfsbase, uint8_t, bRm)
{
    IEMOP_MNEMONIC(wrfsbase, "wrfsbase Ry");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
    {
        IEM_MC_BEGIN(1, 0);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
        IEM_MC_ARG(uint64_t, u64Dst, 0);
        IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst); /* #GP(0) on non-canonical base */
        IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u64Dst);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 0);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
        IEM_MC_ARG(uint32_t, u32Dst, 0);
        IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        /* U64 store with a 32-bit value: zero extends into the full base. */
        IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u32Dst);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
8435
8436
/**
 * Opcode 0xf3 0x0f 0xae 11b/3 - WRGSBASE Ry.
 *
 * GS twin of iemOp_Grp15_wrfsbase: writes the mod/rm 'rm' general register
 * into the GS segment base, with a canonical-address \#GP(0) check in the
 * 64-bit form and zero extension in the 32-bit form.
 */
FNIEMOP_DEF_1(iemOp_Grp15_wrgsbase, uint8_t, bRm)
{
    IEMOP_MNEMONIC(wrgsbase, "wrgsbase Ry");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
    {
        IEM_MC_BEGIN(1, 0);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
        IEM_MC_ARG(uint64_t, u64Dst, 0);
        IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst); /* #GP(0) on non-canonical base */
        IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u64Dst);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 0);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
        IEM_MC_ARG(uint32_t, u32Dst, 0);
        IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        /* U64 store with a 32-bit value: zero extends into the full base. */
        IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u32Dst);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
8465
8466
8467/**
8468 * Group 15 jump table for register variant.
8469 */
8470IEM_STATIC const PFNIEMOPRM g_apfnGroup15RegReg[] =
8471{ /* pfx: none, 066h, 0f3h, 0f2h */
8472 /* /0 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdfsbase, iemOp_InvalidWithRM,
8473 /* /1 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdgsbase, iemOp_InvalidWithRM,
8474 /* /2 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrfsbase, iemOp_InvalidWithRM,
8475 /* /3 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrgsbase, iemOp_InvalidWithRM,
8476 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
8477 /* /5 */ iemOp_Grp15_lfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8478 /* /6 */ iemOp_Grp15_mfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8479 /* /7 */ iemOp_Grp15_sfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8480};
8481AssertCompile(RT_ELEMENTS(g_apfnGroup15RegReg) == 8*4);
8482
8483
8484/**
8485 * Group 15 jump table for memory variant.
8486 */
8487IEM_STATIC const PFNIEMOPRM g_apfnGroup15MemReg[] =
8488{ /* pfx: none, 066h, 0f3h, 0f2h */
8489 /* /0 */ iemOp_Grp15_fxsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8490 /* /1 */ iemOp_Grp15_fxrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8491 /* /2 */ iemOp_Grp15_ldmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8492 /* /3 */ iemOp_Grp15_stmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8493 /* /4 */ iemOp_Grp15_xsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8494 /* /5 */ iemOp_Grp15_xrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8495 /* /6 */ iemOp_Grp15_xsaveopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8496 /* /7 */ iemOp_Grp15_clflush, iemOp_Grp15_clflushopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8497};
8498AssertCompile(RT_ELEMENTS(g_apfnGroup15MemReg) == 8*4);
8499
8500
/**
 * Opcode 0x0f 0xae - Group 15 dispatcher.
 *
 * Fetches the mod/rm byte and forwards to the register or memory jump table,
 * indexed by the reg field (row) and the active mandatory-prefix index
 * (column: none/066h/0f3h/0f2h).
 */
FNIEMOP_DEF(iemOp_Grp15)
{
    IEMOP_HLP_MIN_586(); /* Not entirely accurate nor needed, but useful for debugging 286 code. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
        /* register, register */
        return FNIEMOP_CALL_1(g_apfnGroup15RegReg[  IEM_GET_MODRM_REG_8(bRm) * 4
                                                  + pVCpu->iem.s.idxPrefix], bRm);
    /* memory, register */
    return FNIEMOP_CALL_1(g_apfnGroup15MemReg[  IEM_GET_MODRM_REG_8(bRm) * 4
                                              + pVCpu->iem.s.idxPrefix], bRm);
}
8514
8515
/**
 * Opcode 0x0f 0xaf - IMUL Gv,Ev (two-operand form).
 *
 * Delegates to the generic rv,rm binary-operator helper with the EFLAGS
 * behavior-selected imul worker table.  SF/ZF/AF/PF are declared undefined
 * for the instruction verifier.
 */
FNIEMOP_DEF(iemOp_imul_Gv_Ev)
{
    IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
    IEMOP_HLP_MIN_386();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_eflags));
}
8524
8525
/**
 * Opcode 0x0f 0xb0 - CMPXCHG Eb,Gb.
 *
 * Compare-and-exchange byte: the assembly worker gets the destination, a
 * reference to AL, the Gb source and EFLAGS.  The LOCK prefix selects the
 * locked worker variant.  In the memory form the destination is mapped RW
 * and AL is shadowed in a local that is written back after the operation.
 */
FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
{
    IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
    IEMOP_HLP_MIN_486();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register destination: all operands referenced directly. */
        IEMOP_HLP_DONE_DECODING();
        IEM_MC_BEGIN(4, 0);
        IEM_MC_ARG(uint8_t *,       pu8Dst,                 0);
        IEM_MC_ARG(uint8_t *,       pu8Al,                  1);
        IEM_MC_ARG(uint8_t,         u8Src,                  2);
        IEM_MC_ARG(uint32_t *,      pEFlags,                3);

        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* Memory destination: map RW, work on a local AL copy, commit after. */
        IEM_MC_BEGIN(4, 3);
        IEM_MC_ARG(uint8_t *,       pu8Dst,                 0);
        IEM_MC_ARG(uint8_t *,       pu8Al,                  1);
        IEM_MC_ARG(uint8_t,         u8Src,                  2);
        IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        3);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(uint8_t, u8Al);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING();
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_REF_LOCAL(pu8Al, u8Al);
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al); /* write back possibly-updated AL */
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
8584
/**
 * Opcode 0x0f 0xb1 - CMPXCHG Ev,Gv.
 *
 * Word/dword/qword compare-and-exchange against AX/EAX/RAX, dispatched on the
 * effective operand size.  The LOCK prefix selects the locked worker variant.
 * On 32-bit x86 hosts (RT_ARCH_X86) the 64-bit source is passed by reference
 * because the worker ABI cannot take a 64-bit value argument there.
 */
FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
{
    IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
    IEMOP_HLP_MIN_486();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register destination: operands referenced directly per size. */
        IEMOP_HLP_DONE_DECODING();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                0);
                IEM_MC_ARG(uint16_t *,      pu16Ax,                 1);
                IEM_MC_ARG(uint16_t,        u16Src,                 2);
                IEM_MC_ARG(uint32_t *,      pEFlags,                3);

                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                0);
                IEM_MC_ARG(uint32_t *,      pu32Eax,                1);
                IEM_MC_ARG(uint32_t,        u32Src,                 2);
                IEM_MC_ARG(uint32_t *,      pEFlags,                3);

                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);

                /* 32-bit register writes clear the high halves in 64-bit mode. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                0);
                IEM_MC_ARG(uint64_t *,      pu64Rax,                1);
#ifdef RT_ARCH_X86
                IEM_MC_ARG(uint64_t *,      pu64Src,                2); /* by ref on 32-bit hosts */
#else
                IEM_MC_ARG(uint64_t,        u64Src,                 2);
#endif
                IEM_MC_ARG(uint32_t *,      pEFlags,                3);

                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
#ifdef RT_ARCH_X86
                IEM_MC_REF_GREG_U64(pu64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
#else
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
#endif

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory destination: map RW, shadow the accumulator in a local and
           write it back after the worker (it may have been updated). */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                0);
                IEM_MC_ARG(uint16_t *,      pu16Ax,                 1);
                IEM_MC_ARG(uint16_t,        u16Src,                 2);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint16_t, u16Ax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                0);
                IEM_MC_ARG(uint32_t *,      pu32Eax,                1);
                IEM_MC_ARG(uint32_t,        u32Src,                 2);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint32_t, u32Eax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                0);
                IEM_MC_ARG(uint64_t *,      pu64Rax,                1);
#ifdef RT_ARCH_X86
                IEM_MC_ARG(uint64_t *,      pu64Src,                2); /* by ref on 32-bit hosts */
#else
                IEM_MC_ARG(uint64_t,        u64Src,                 2);
#endif
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint64_t, u64Rax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
#ifdef RT_ARCH_X86
                IEM_MC_REF_GREG_U64(pu64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
#else
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
#endif

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8778
8779
/**
 * Common worker for LSS/LFS/LGS (far-pointer load into sreg:greg).
 *
 * Reads an offset:selector pair from memory (offset first, selector at the
 * displacement following the offset: +2/+4/+8 by operand size) and defers the
 * actual segment-register + general-register load to iemCImpl_load_SReg_Greg.
 *
 * @param   iSegReg     The destination segment register (X86_SREG_XXX).
 * @param   bRm         The mod/rm byte; must denote a memory operand.
 */
FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
{
    Assert(IEM_IS_MODRM_MEM_MODE(bRm)); /* Caller checks this */
    uint8_t const iGReg = IEM_GET_MODRM_REG(pVCpu, bRm);

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t,        uSel,                                    0);
            IEM_MC_ARG(uint16_t,        offSeg,                                  1);
            IEM_MC_ARG_CONST(uint8_t,   iSegRegArg,/*=*/iSegReg,                 2);
            IEM_MC_ARG_CONST(uint8_t,   iGRegArg,  /*=*/iGReg,                   3);
            IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR,       GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 2); /* selector follows 16-bit offset */
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t,        uSel,                                    0);
            IEM_MC_ARG(uint32_t,        offSeg,                                  1);
            IEM_MC_ARG_CONST(uint8_t,   iSegRegArg,/*=*/iSegReg,                 2);
            IEM_MC_ARG_CONST(uint8_t,   iGRegArg,  /*=*/iGReg,                   3);
            IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR,       GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 4); /* selector follows 32-bit offset */
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t,        uSel,                                    0);
            IEM_MC_ARG(uint64_t,        offSeg,                                  1);
            IEM_MC_ARG_CONST(uint8_t,   iSegRegArg,/*=*/iSegReg,                 2);
            IEM_MC_ARG_CONST(uint8_t,   iGRegArg,  /*=*/iGReg,                   3);
            IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR,       GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            if (IEM_IS_GUEST_CPU_AMD(pVCpu)) /** @todo testcase: rev 3.15 of the amd manuals claims it only loads a 32-bit greg. */
                IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
            else
                IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 8); /* selector follows 64-bit offset */
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
8841
8842
/**
 * Opcode 0x0f 0xb2 - LSS Gv,Mp.
 *
 * Far-pointer load into SS:Gv.  Register form raises \#UD; memory form is
 * handled by the common far-load worker.
 */
FNIEMOP_DEF(iemOp_lss_Gv_Mp)
{
    IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
        return IEMOP_RAISE_INVALID_OPCODE(); /* no register encoding for Mp operands */
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
}
8853
8854
/**
 * Opcode 0x0f 0xb3 - BTR Ev,Gv.
 *
 * Bit test and reset; delegates to the common bit-operation worker with the
 * btr implementation table.
 */
FNIEMOP_DEF(iemOp_btr_Ev_Gv)
{
    IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
}
8862
8863
/**
 * Opcode 0x0f 0xb4 - LFS Gv,Mp.
 *
 * Far-pointer load into FS:Gv.  Register form raises \#UD; memory form is
 * handled by the common far-load worker.
 */
FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
{
    IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
        return IEMOP_RAISE_INVALID_OPCODE(); /* no register encoding for Mp operands */
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
}
8874
8875
/**
 * Opcode 0x0f 0xb5 - LGS Gv,Mp.
 *
 * Far-pointer load into GS:Gv.  Register form raises \#UD; memory form is
 * handled by the common far-load worker.
 */
FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
{
    IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
        return IEMOP_RAISE_INVALID_OPCODE(); /* no register encoding for Mp operands */
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
}
8886
8887
/**
 * Opcode 0x0f 0xb6 - MOVZX Gv,Eb.
 *
 * Zero-extends a byte (register or memory) into a 16/32/64-bit general
 * register, dispatched on the effective operand size.
 */
FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
{
    IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8981
8982
/**
 * Opcode 0x0f 0xb7 - MOVZX Gv,Ew.
 *
 * Zero-extends a word (register or memory) into a 32/64-bit general register.
 * Only the 64-bit operand size gets its own path; everything else uses the
 * 32-bit form (see the todo below about the operand-size prefix).
 */
FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
{
    IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Not entirely sure how the operand size prefix is handled here,
     *        assuming that it will be ignored. Would be nice to have a few
     *        test for this. */
    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}
9051
9052
/** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF); decodes to \#UD here. */
FNIEMOP_UD_STUB(iemOp_jmpe);
9055
9056
/**
 * Opcode 0xf3 0x0f 0xb8 - POPCNT Gv,Ev.
 *
 * Raises \#UD (invalid-with-RM path) when the guest lacks POPCNT.  Picks the
 * native assembly workers when the host supports POPCNT, otherwise the C
 * fallbacks, and delegates operand handling to the rv,rm binary helper.
 */
FNIEMOP_DEF(iemOp_popcnt_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, POPCNT, popcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fPopCnt)
        return iemOp_InvalidNeedRM(pVCpu);
#ifndef TST_IEM_CHECK_MC
# if (defined(RT_ARCH_X86) || defined(RT_ARCH_AMD64)) && !defined(IEM_WITHOUT_ASSEMBLY)
    static const IEMOPBINSIZES s_Native =
    {   NULL, NULL,     iemAImpl_popcnt_u16,    NULL,       iemAImpl_popcnt_u32,    NULL,       iemAImpl_popcnt_u64,    NULL };
# endif
    static const IEMOPBINSIZES s_Fallback =
    {   NULL, NULL,     iemAImpl_popcnt_u16_fallback, NULL, iemAImpl_popcnt_u32_fallback, NULL, iemAImpl_popcnt_u64_fallback, NULL };
#endif
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, IEM_SELECT_HOST_OR_FALLBACK(fPopCnt, &s_Native, &s_Fallback));
}
9073
9074
9075/**
9076 * @opcode 0xb9
9077 * @opinvalid intel-modrm
9078 * @optest ->
9079 */
9080FNIEMOP_DEF(iemOp_Grp10)
9081{
9082 /*
9083 * AMD does not decode beyond the 0xb9 whereas intel does the modr/m bit
9084 * too. See bs3-cpu-decoder-1.c32. So, we can forward to iemOp_InvalidNeedRM.
9085 */
9086 Log(("iemOp_Grp10 aka UD1 -> #UD\n"));
9087 IEMOP_MNEMONIC2EX(ud1, "ud1", RM, UD1, ud1, Gb, Eb, DISOPTYPE_INVALID, IEMOPHINT_IGNORES_OP_SIZES); /* just picked Gb,Eb here. */
9088 return FNIEMOP_CALL(iemOp_InvalidNeedRM);
9089}
9090
9091
/**
 * Opcode 0x0f 0xba - Group 8: BT/BTS/BTR/BTC Ev,Ib.
 *
 * /0../3 are invalid (decoded with mod/rm + imm8 for correct length);
 * /4=BT, /5=BTS, /6=BTR, /7=BTC.  The imm8 bit offset is masked to the
 * operand width (0x0f/0x1f/0x3f).  BT (no locked worker) maps the memory
 * operand read-only; the others map it read-write and honor LOCK.
 */
FNIEMOP_DEF(iemOp_Grp8)
{
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPBINSIZES pImpl;
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: case 1: case 2: case 3:
            /* Both AMD and Intel want full modr/m decoding and imm8. */
            return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeedImm8, bRm);
        case 4: pImpl = &g_iemAImpl_bt;  IEMOP_MNEMONIC(bt_Ev_Ib,  "bt  Ev,Ib"); break;
        case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib"); break;
        case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib"); break;
        case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib"); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register destination. */
        uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                    0);
                IEM_MC_ARG_CONST(uint16_t,  u16Src, /*=*/ u8Bit & 0x0f, 1); /* offset masked to word width */
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);

                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                    0);
                IEM_MC_ARG_CONST(uint32_t,  u32Src, /*=*/ u8Bit & 0x1f, 1); /* offset masked to dword width */
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);

                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                    0);
                IEM_MC_ARG_CONST(uint64_t,  u64Src, /*=*/ u8Bit & 0x3f, 1); /* offset masked to qword width */
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);

                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory destination. */

        /* BT has no locked worker; map read-only for it, read-write otherwise. */
        uint32_t fAccess;
        if (pImpl->pfnLockedU16)
            fAccess = IEM_ACCESS_DATA_RW;
        else /* BT */
            fAccess = IEM_ACCESS_DATA_R;

        /** @todo test negative bit offsets! */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *,              pu16Dst,                0);
                IEM_MC_ARG(uint16_t,                u16Src,                 1);
                IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags,        2);
                IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = an imm8 byte follows the mod/rm bytes */
                uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
                IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* LOCK on BT is invalid */
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *,              pu32Dst,                0);
                IEM_MC_ARG(uint32_t,                u32Src,                 1);
                IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags,        2);
                IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = an imm8 byte follows the mod/rm bytes */
                uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
                IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* LOCK on BT is invalid */
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *,              pu64Dst,                0);
                IEM_MC_ARG(uint64_t,                u64Src,                 1);
                IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags,        2);
                IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = an imm8 byte follows the mod/rm bytes */
                uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
                IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* LOCK on BT is invalid */
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9263
9264
/** Opcode 0x0f 0xbb. */
FNIEMOP_DEF(iemOp_btc_Ev_Gv)
{
    IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
    IEMOP_HLP_MIN_486(); /* 386 introduced the bit-test family; helper enforces the minimum CPU. */
    /* Defer to the common bit-test decoder with the BTC (complement) implementation table. */
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
}
9272
9273
9274/**
9275 * Common worker for BSF and BSR instructions.
9276 *
9277 * These cannot use iemOpHlpBinaryOperator_rv_rm because they don't always write
9278 * the destination register, which means that for 32-bit operations the high
9279 * bits must be left alone.
9280 *
9281 * @param pImpl Pointer to the instruction implementation (assembly).
9282 */
9283FNIEMOP_DEF_1(iemOpHlpBitScanOperator_rv_rm, PCIEMOPBINSIZES, pImpl)
9284{
9285 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9286
9287 /*
9288 * If rm is denoting a register, no more instruction bytes.
9289 */
9290 if (IEM_IS_MODRM_REG_MODE(bRm))
9291 {
9292 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9293 switch (pVCpu->iem.s.enmEffOpSize)
9294 {
9295 case IEMMODE_16BIT:
9296 IEM_MC_BEGIN(3, 0);
9297 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9298 IEM_MC_ARG(uint16_t, u16Src, 1);
9299 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9300
9301 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_RM(pVCpu, bRm));
9302 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
9303 IEM_MC_REF_EFLAGS(pEFlags);
9304 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
9305
9306 IEM_MC_ADVANCE_RIP();
9307 IEM_MC_END();
9308 break;
9309
9310 case IEMMODE_32BIT:
9311 IEM_MC_BEGIN(3, 0);
9312 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9313 IEM_MC_ARG(uint32_t, u32Src, 1);
9314 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9315
9316 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
9317 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
9318 IEM_MC_REF_EFLAGS(pEFlags);
9319 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
9320 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
9321 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
9322 IEM_MC_ENDIF();
9323 IEM_MC_ADVANCE_RIP();
9324 IEM_MC_END();
9325 break;
9326
9327 case IEMMODE_64BIT:
9328 IEM_MC_BEGIN(3, 0);
9329 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9330 IEM_MC_ARG(uint64_t, u64Src, 1);
9331 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9332
9333 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
9334 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
9335 IEM_MC_REF_EFLAGS(pEFlags);
9336 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
9337
9338 IEM_MC_ADVANCE_RIP();
9339 IEM_MC_END();
9340 break;
9341 }
9342 }
9343 else
9344 {
9345 /*
9346 * We're accessing memory.
9347 */
9348 switch (pVCpu->iem.s.enmEffOpSize)
9349 {
9350 case IEMMODE_16BIT:
9351 IEM_MC_BEGIN(3, 1);
9352 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9353 IEM_MC_ARG(uint16_t, u16Src, 1);
9354 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9355 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9356
9357 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9358 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9359 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9360 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
9361 IEM_MC_REF_EFLAGS(pEFlags);
9362 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
9363
9364 IEM_MC_ADVANCE_RIP();
9365 IEM_MC_END();
9366 break;
9367
9368 case IEMMODE_32BIT:
9369 IEM_MC_BEGIN(3, 1);
9370 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9371 IEM_MC_ARG(uint32_t, u32Src, 1);
9372 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9373 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9374
9375 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9376 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9377 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9378 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
9379 IEM_MC_REF_EFLAGS(pEFlags);
9380 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
9381
9382 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
9383 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
9384 IEM_MC_ENDIF();
9385 IEM_MC_ADVANCE_RIP();
9386 IEM_MC_END();
9387 break;
9388
9389 case IEMMODE_64BIT:
9390 IEM_MC_BEGIN(3, 1);
9391 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9392 IEM_MC_ARG(uint64_t, u64Src, 1);
9393 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9394 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9395
9396 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9397 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9398 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9399 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
9400 IEM_MC_REF_EFLAGS(pEFlags);
9401 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
9402
9403 IEM_MC_ADVANCE_RIP();
9404 IEM_MC_END();
9405 break;
9406 }
9407 }
9408 return VINF_SUCCESS;
9409}
9410
9411
/** Opcode 0x0f 0xbc. */
FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
{
    IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
    IEMOP_HLP_MIN_386();
    /* OF/SF/AF/PF/CF are architecturally undefined after BSF; only ZF is defined. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    /* Worker table is selected by the configured target-CPU EFLAGS behavior. */
    return FNIEMOP_CALL_1(iemOpHlpBitScanOperator_rv_rm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_bsf_eflags));
}
9420
9421
/** Opcode 0xf3 0x0f 0xbc - TZCNT Gv, Ev */
FNIEMOP_DEF(iemOp_tzcnt_Gv_Ev)
{
    /* Without BMI1 the F3 prefix is ignored and this encoding decodes as BSF. */
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fBmi1)
        return FNIEMOP_CALL(iemOp_bsf_Gv_Ev);
    IEMOP_MNEMONIC2(RM, TZCNT, tzcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);

#ifndef TST_IEM_CHECK_MC
    /* Per-width worker tables: generic, AMD-flavored and Intel-flavored EFLAGS
       variants (the families differ in the undefined-flag results). */
    static const IEMOPBINSIZES s_iemAImpl_tzcnt =
    {   NULL, NULL, iemAImpl_tzcnt_u16, NULL, iemAImpl_tzcnt_u32, NULL, iemAImpl_tzcnt_u64, NULL };
    static const IEMOPBINSIZES s_iemAImpl_tzcnt_amd =
    {   NULL, NULL, iemAImpl_tzcnt_u16_amd, NULL, iemAImpl_tzcnt_u32_amd, NULL, iemAImpl_tzcnt_u64_amd, NULL };
    static const IEMOPBINSIZES s_iemAImpl_tzcnt_intel =
    {   NULL, NULL, iemAImpl_tzcnt_u16_intel, NULL, iemAImpl_tzcnt_u32_intel, NULL, iemAImpl_tzcnt_u64_intel, NULL };
    /* First index: host BMI1 availability (see the SELECT_EX call below);
       second: target-CPU EFLAGS behavior — presumably the four behavior slots
       of IEMTARGETCPU_EFL_BEHAVIOR_SELECT_EX, TODO confirm against its definition. */
    static const IEMOPBINSIZES * const s_iemAImpl_tzcnt_eflags[2][4] =
    {
        { &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_amd, &s_iemAImpl_tzcnt_intel },
        { &s_iemAImpl_tzcnt, &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_amd, &s_iemAImpl_tzcnt }
    };
#endif
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm,
                          IEMTARGETCPU_EFL_BEHAVIOR_SELECT_EX(s_iemAImpl_tzcnt_eflags, IEM_GET_HOST_CPU_FEATURES(pVCpu)->fBmi1));
}
9446
9447
/** Opcode 0x0f 0xbd. */
FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
{
    IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
    IEMOP_HLP_MIN_386();
    /* OF/SF/AF/PF/CF are architecturally undefined after BSR; only ZF is defined. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    /* Worker table is selected by the configured target-CPU EFLAGS behavior. */
    return FNIEMOP_CALL_1(iemOpHlpBitScanOperator_rv_rm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_bsr_eflags));
}
9456
9457
/** Opcode 0xf3 0x0f 0xbd - LZCNT Gv, Ev */
FNIEMOP_DEF(iemOp_lzcnt_Gv_Ev)
{
    /* Without BMI1 (ABM on AMD) the F3 prefix is ignored and this decodes as BSR. */
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fBmi1)
        return FNIEMOP_CALL(iemOp_bsr_Gv_Ev);
    IEMOP_MNEMONIC2(RM, LZCNT, lzcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);

#ifndef TST_IEM_CHECK_MC
    /* Per-width worker tables: generic, AMD-flavored and Intel-flavored EFLAGS
       variants — mirrors the TZCNT setup above. */
    static const IEMOPBINSIZES s_iemAImpl_lzcnt =
    {   NULL, NULL, iemAImpl_lzcnt_u16, NULL, iemAImpl_lzcnt_u32, NULL, iemAImpl_lzcnt_u64, NULL };
    static const IEMOPBINSIZES s_iemAImpl_lzcnt_amd =
    {   NULL, NULL, iemAImpl_lzcnt_u16_amd, NULL, iemAImpl_lzcnt_u32_amd, NULL, iemAImpl_lzcnt_u64_amd, NULL };
    static const IEMOPBINSIZES s_iemAImpl_lzcnt_intel =
    {   NULL, NULL, iemAImpl_lzcnt_u16_intel, NULL, iemAImpl_lzcnt_u32_intel, NULL, iemAImpl_lzcnt_u64_intel, NULL };
    /* First index: host BMI1 availability; second: target-CPU EFLAGS behavior
       (selected by IEMTARGETCPU_EFL_BEHAVIOR_SELECT_EX below). */
    static const IEMOPBINSIZES * const s_iemAImpl_lzcnt_eflags[2][4] =
    {
        { &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_amd, &s_iemAImpl_lzcnt_intel },
        { &s_iemAImpl_lzcnt, &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_amd, &s_iemAImpl_lzcnt }
    };
#endif
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm,
                          IEMTARGETCPU_EFL_BEHAVIOR_SELECT_EX(s_iemAImpl_lzcnt_eflags, IEM_GET_HOST_CPU_FEATURES(pVCpu)->fBmi1));
}
9482
9483
9484
/** Opcode 0x0f 0xbe. */
FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
{
    /* MOVSX Gv,Eb: sign-extend a byte (register or memory) into a 16/32/64-bit
       destination register.  Only reads flags machinery decode-side; the
       instruction itself does not touch EFLAGS. */
    IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                /* Fetch-with-sign-extend from memory, then plain store to Gv. */
                IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9578
9579
/** Opcode 0x0f 0xbf. */
FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
{
    /* MOVSX Gv,Ew: sign-extend a word into a 32- or 64-bit register.  There is
       no meaningful 16-bit destination form, so only the != 64-bit (32-bit)
       and 64-bit paths exist below. */
    IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Not entirely sure how the operand size prefix is handled here,
     *        assuming that it will be ignored. Would be nice to have a few
     *        test for this. */
    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        {
            /* 16/32-bit operand size: sign-extend word to dword. */
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            /* 64-bit operand size: sign-extend word to qword. */
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}
9648
9649
/** Opcode 0x0f 0xc0. */
FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
{
    /* XADD Eb,Gb: exchange-and-add byte.  The register form rejects LOCK;
       the memory form honours it by dispatching to the locked worker. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_MIN_486(); /* XADD was introduced with the 80486. */
    IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t *, pu8Reg, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        /* Both operands are taken by reference; the worker swaps and adds. */
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_GREG_U8(pu8Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
        IEM_MC_BEGIN(3, 3);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t *, pu8Reg, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(uint8_t, u8RegCopy);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        /* Work on a local copy of the guest register so the worker can write
           the old memory value into it; committed back after unmapping. */
        IEM_MC_FETCH_GREG_U8(u8RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
        IEM_MC_FETCH_EFLAGS(EFlags);
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8RegCopy);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }
    return VINF_SUCCESS;
}
9708
9709
/** Opcode 0x0f 0xc1. */
FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
{
    /* XADD Ev,Gv: exchange-and-add for 16/32/64-bit operands.  Same structure
       as the byte form above, replicated per operand size. */
    IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
    IEMOP_HLP_MIN_486(); /* XADD was introduced with the 80486. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Reg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U16(pu16Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Reg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U32(pu32Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);

                /* 32-bit writes zero bits 63:32 of both registers involved. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Reg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U64(pu64Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Reg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16RegCopy);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                /* Local register copy; the worker writes the old memory value
                   into it, and it is stored back after committing memory. */
                IEM_MC_FETCH_GREG_U16(u16RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Reg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32RegCopy);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U32(u32RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                /* IEM_MC_STORE_GREG_U32 implicitly clears bits 63:32. */
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Reg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64RegCopy);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U64(u64RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9862
9863
/* SSE/SSE2 packed/scalar compare instructions (0x0f 0xc2) — decode stubs,
   not yet implemented. */
/** Opcode 0x0f 0xc2 - cmpps Vps,Wps,Ib */
FNIEMOP_STUB(iemOp_cmpps_Vps_Wps_Ib);
/** Opcode 0x66 0x0f 0xc2 - cmppd Vpd,Wpd,Ib */
FNIEMOP_STUB(iemOp_cmppd_Vpd_Wpd_Ib);
/** Opcode 0xf3 0x0f 0xc2 - cmpss Vss,Wss,Ib */
FNIEMOP_STUB(iemOp_cmpss_Vss_Wss_Ib);
/** Opcode 0xf2 0x0f 0xc2 - cmpsd Vsd,Wsd,Ib */
FNIEMOP_STUB(iemOp_cmpsd_Vsd_Wsd_Ib);
9872
9873
/** Opcode 0x0f 0xc3. */
FNIEMOP_DEF(iemOp_movnti_My_Gy)
{
    /* MOVNTI: non-temporal store of a 32/64-bit GPR to memory.  Requires SSE2;
       the non-temporal hint itself is not modelled, a plain store is emitted. */
    IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /* Only the register -> memory form makes sense, assuming #UD for the other form. */
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                /* #UD when the guest CPU profile lacks SSE2. */
                if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
                    return IEMOP_RAISE_INVALID_OPCODE();

                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
                    return IEMOP_RAISE_INVALID_OPCODE();

                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_16BIT:
                /** @todo check this form.   */
                return IEMOP_RAISE_INVALID_OPCODE();
        }
    }
    else
        return IEMOP_RAISE_INVALID_OPCODE(); /* register form: #UD */
    return VINF_SUCCESS;
}
9927/* Opcode 0x66 0x0f 0xc3 - invalid */
9928/* Opcode 0xf3 0x0f 0xc3 - invalid */
9929/* Opcode 0xf2 0x0f 0xc3 - invalid */
9930
/* MMX/SSE insert/extract-word and shuffle instructions (0x0f 0xc4..0xc6) —
   decode stubs, not yet implemented. */
/** Opcode 0x0f 0xc4 - pinsrw Pq, Ry/Mw,Ib */
FNIEMOP_STUB(iemOp_pinsrw_Pq_RyMw_Ib);
/** Opcode 0x66 0x0f 0xc4 - pinsrw Vdq, Ry/Mw,Ib */
FNIEMOP_STUB(iemOp_pinsrw_Vdq_RyMw_Ib);
/* Opcode 0xf3 0x0f 0xc4 - invalid */
/* Opcode 0xf2 0x0f 0xc4 - invalid */

/** Opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib */
FNIEMOP_STUB(iemOp_pextrw_Gd_Nq_Ib);
/** Opcode 0x66 0x0f 0xc5 - pextrw Gd, Udq, Ib */
FNIEMOP_STUB(iemOp_pextrw_Gd_Udq_Ib);
/* Opcode 0xf3 0x0f 0xc5 - invalid */
/* Opcode 0xf2 0x0f 0xc5 - invalid */

/** Opcode 0x0f 0xc6 - shufps Vps, Wps, Ib */
FNIEMOP_STUB(iemOp_shufps_Vps_Wps_Ib);
/** Opcode 0x66 0x0f 0xc6 - shufpd Vpd, Wpd, Ib */
FNIEMOP_STUB(iemOp_shufpd_Vpd_Wpd_Ib);
/* Opcode 0xf3 0x0f 0xc6 - invalid */
/* Opcode 0xf2 0x0f 0xc6 - invalid */
9951
9952
/** Opcode 0x0f 0xc7 !11/1. */
FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
{
    /* CMPXCHG8B m64: compare EDX:EAX with the memory qword; on match store
       ECX:EBX and set ZF, otherwise load the memory value into EDX:EAX.
       The LOCK prefix selects the locked assembly worker. */
    IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");

    IEM_MC_BEGIN(4, 3);
    IEM_MC_ARG(uint64_t *, pu64MemDst, 0);
    IEM_MC_ARG(PRTUINT64U, pu64EaxEdx, 1);
    IEM_MC_ARG(PRTUINT64U, pu64EbxEcx, 2);
    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
    IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
    IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);

    /* Assemble the EDX:EAX comparand into a local 64-bit pair. */
    IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
    IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
    IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);

    /* And the ECX:EBX replacement value likewise. */
    IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
    IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
    IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);

    IEM_MC_FETCH_EFLAGS(EFlags);
    if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
    else
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);

    IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
    IEM_MC_COMMIT_EFLAGS(EFlags);
    /* Compare failed (ZF clear): write the memory value back to EAX/EDX. */
    IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
        /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */
        IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
        IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9997
9998
/** Opcode REX.W 0x0f 0xc7 !11/1. */
FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
{
    /* CMPXCHG16B m128: 128-bit variant of cmpxchg8b using RDX:RAX / RCX:RBX.
       Requires CPUID.CX16; #UD otherwise.  #GP(0) on unaligned operands. */
    IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
    {
#if 0
        RT_NOREF(bRm);
        IEMOP_BITCH_ABOUT_STUB();
        return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#else
        IEM_MC_BEGIN(4, 3);
        IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0);
        IEM_MC_ARG(PRTUINT128U, pu128RaxRdx, 1);
        IEM_MC_ARG(PRTUINT128U, pu128RbxRcx, 2);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
        IEM_MC_LOCAL(RTUINT128U, u128RaxRdx);
        IEM_MC_LOCAL(RTUINT128U, u128RbxRcx);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING();
        /* The operand must be 16-byte aligned, unlike most other accesses. */
        IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16);
        IEM_MC_MEM_MAP(pu128MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);

        /* RDX:RAX comparand and RCX:RBX replacement, as 128-bit locals. */
        IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Lo, X86_GREG_xAX);
        IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Hi, X86_GREG_xDX);
        IEM_MC_REF_LOCAL(pu128RaxRdx, u128RaxRdx);

        IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Lo, X86_GREG_xBX);
        IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Hi, X86_GREG_xCX);
        IEM_MC_REF_LOCAL(pu128RbxRcx, u128RbxRcx);

        IEM_MC_FETCH_EFLAGS(EFlags);
# if defined(RT_ARCH_AMD64) || defined(RT_ARCH_ARM64)
#  if defined(RT_ARCH_AMD64)
        /* On AMD64 hosts the native worker needs host CX16 support. */
        if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
#  endif
        {
            if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
            else
                IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
        }
#  if defined(RT_ARCH_AMD64)
        else
#  endif
# endif
# if !defined(RT_ARCH_ARM64) /** @todo may need this for unaligned accesses... */
        {
            /* Note! The fallback for 32-bit systems and systems without CX16 is multiple
                     accesses and not all all atomic, which works fine on in UNI CPU guest
                     configuration (ignoring DMA).  If guest SMP is active we have no choice
                     but to use a rendezvous callback here.  Sigh. */
            if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
                IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
            else
            {
                IEM_MC_CALL_CIMPL_4(iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
                /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
            }
        }
# endif

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu128MemDst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        /* Compare failed (ZF clear): write the memory value back to RAX/RDX. */
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u128RaxRdx.s.Lo);
            IEM_MC_STORE_GREG_U64(X86_GREG_xDX, u128RaxRdx.s.Hi);
        IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();

        IEM_MC_END();
        return VINF_SUCCESS;
#endif
    }
    Log(("cmpxchg16b -> #UD\n"));
    return IEMOP_RAISE_INVALID_OPCODE();
}
10078
10079FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8bOr16b, uint8_t, bRm)
10080{
10081 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
10082 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
10083 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
10084}
10085
/** Opcode 0x0f 0xc7 11/6.  RDRAND - not implemented, raises \#UD. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm);
10088
/** Opcode 0x0f 0xc7 !11/6. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm)
{
    /* VMPTRLD m64: load the current-VMCS pointer.  Decode computes the
       effective address and defers all checks/work to the C implementation. */
    IEMOP_MNEMONIC(vmptrld, "vmptrld");
    IEMOP_HLP_IN_VMX_OPERATION("vmptrld", kVmxVDiag_Vmptrld);
    IEMOP_HLP_VMX_INSTR("vmptrld", kVmxVDiag_Vmptrld);
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    /* 066h/0f3h/0f2h prefixes select other group-9 encodings; reject them here. */
    IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_vmptrld, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
#else
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);
#endif
10109
/** Opcode 0x66 0x0f 0xc7 !11/6. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm)
{
    /* VMCLEAR m64: clear the referenced VMCS; work done in the C implementation. */
    IEMOP_MNEMONIC(vmclear, "vmclear");
    IEMOP_HLP_IN_VMX_OPERATION("vmclear", kVmxVDiag_Vmclear);
    IEMOP_HLP_VMX_INSTR("vmclear", kVmxVDiag_Vmclear);
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    /* NOTE(review): plain DONE_DECODING here vs. the NO_SIZE_OP_REPZ_OR_REPNZ
       variant in vmptrld/vmptrst; prefix selection already happened via the
       group-9 dispatch table — confirm this asymmetry is intentional. */
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_vmclear, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}
#else
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);
#endif
10130
/** Opcode 0xf3 0x0f 0xc7 !11/6. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm)
{
    /* VMXON m64: enter VMX operation.  No IN_VMX_OPERATION check here since
       VMXON is legal outside VMX operation; the C implementation handles the
       root-mode and other condition checks. */
    IEMOP_MNEMONIC(vmxon, "vmxon");
    IEMOP_HLP_VMX_INSTR("vmxon", kVmxVDiag_Vmxon);
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_vmxon, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
#else
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);
#endif
10150
/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm)
{
    /* VMPTRST m64: store the current-VMCS pointer to memory; work done in the
       C implementation. */
    IEMOP_MNEMONIC(vmptrst, "vmptrst");
    IEMOP_HLP_IN_VMX_OPERATION("vmptrst", kVmxVDiag_Vmptrst);
    IEMOP_HLP_VMX_INSTR("vmptrst", kVmxVDiag_Vmptrst);
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_vmptrst, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}
#else
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
#endif
10171
/** Opcode 0x0f 0xc7 11/7. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_rdseed_Rv, uint8_t, bRm); /* not implemented yet - \#UD stub. */


/**
 * Group 9 jump table for register variant.
 *
 * Indexed by ModR/M.reg * 4 + prefix index (none, 066h, 0f3h, 0f2h); see
 * iemOp_Grp9 for the dispatch.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup9RegReg[] =
{   /* pfx:  none,                          066h,                           0f3h,                           0f2h */
    /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
    /* /1 */ IEMOP_X4(iemOp_InvalidWithRM),
    /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
    /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
    /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
    /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
    /* /6 */ iemOp_Grp9_rdrand_Rv,          iemOp_Grp9_rdrand_Rv,           iemOp_InvalidWithRM,            iemOp_InvalidWithRM,
    /* /7 */ iemOp_Grp9_rdseed_Rv,          iemOp_Grp9_rdseed_Rv,           iemOp_InvalidWithRM,            iemOp_InvalidWithRM,
};
AssertCompile(RT_ELEMENTS(g_apfnGroup9RegReg) == 8*4); /* 8 /reg values x 4 prefix columns. */


/**
 * Group 9 jump table for memory variant.
 *
 * Same indexing scheme as g_apfnGroup9RegReg (reg * 4 + prefix index).
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup9MemReg[] =
{   /* pfx:  none,                          066h,                           0f3h,                           0f2h */
    /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
    /* /1 */ iemOp_Grp9_cmpxchg8bOr16b,     iemOp_Grp9_cmpxchg8bOr16b,      iemOp_Grp9_cmpxchg8bOr16b,      iemOp_Grp9_cmpxchg8bOr16b, /* see bs3-cpu-decoding-1 */
    /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
    /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
    /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
    /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
    /* /6 */ iemOp_Grp9_vmptrld_Mq,         iemOp_Grp9_vmclear_Mq,          iemOp_Grp9_vmxon_Mq,            iemOp_InvalidWithRM,
    /* /7 */ iemOp_Grp9_vmptrst_Mq,         iemOp_InvalidWithRM,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,
};
AssertCompile(RT_ELEMENTS(g_apfnGroup9MemReg) == 8*4); /* 8 /reg values x 4 prefix columns. */
10208
10209
10210/** Opcode 0x0f 0xc7. */
10211FNIEMOP_DEF(iemOp_Grp9)
10212{
10213 uint8_t bRm; IEM_OPCODE_GET_NEXT_RM(&bRm);
10214 if (IEM_IS_MODRM_REG_MODE(bRm))
10215 /* register, register */
10216 return FNIEMOP_CALL_1(g_apfnGroup9RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
10217 + pVCpu->iem.s.idxPrefix], bRm);
10218 /* memory, register */
10219 return FNIEMOP_CALL_1(g_apfnGroup9MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
10220 + pVCpu->iem.s.idxPrefix], bRm);
10221}
10222
10223
/**
 * Common 'bswap register' helper.
 *
 * @param   iReg    The general register index, REX.B already merged in by the
 *                  callers (see iemOp_bswap_rAX_r8 and friends).
 *
 * Dispatches on the effective operand size; each case references the target
 * GREG and calls the matching byte-swap assembly helper in place.
 */
FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(1, 0);
            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);     /* Don't clear the high dword! */
            IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(1, 0);
            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /* 32-bit writes zero the high dword in 64-bit mode. */
            IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(1, 0);
            IEM_MC_ARG(uint64_t *, pu64Dst, 0);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10263
10264
/** Opcode 0x0f 0xc8. */
FNIEMOP_DEF(iemOp_bswap_rAX_r8)
{
    IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
    /* Note! Intel manuals states that R8-R15 can be accessed by using a REX.X
             prefix.  REX.B is the correct prefix it appears.  For a parallel
             case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
    IEMOP_HLP_MIN_486(); /* bswap was introduced with the 486. */
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
}


/** Opcode 0x0f 0xc9. */
FNIEMOP_DEF(iemOp_bswap_rCX_r9)
{
    IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
}
10284
10285
10286/** Opcode 0x0f 0xca. */
10287FNIEMOP_DEF(iemOp_bswap_rDX_r10)
10288{
10289 IEMOP_MNEMONIC(bswap_rDX_r9, "bswap rDX/r9");
10290 IEMOP_HLP_MIN_486();
10291 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
10292}
10293
10294
10295/** Opcode 0x0f 0xcb. */
10296FNIEMOP_DEF(iemOp_bswap_rBX_r11)
10297{
10298 IEMOP_MNEMONIC(bswap_rBX_r9, "bswap rBX/r9");
10299 IEMOP_HLP_MIN_486();
10300 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
10301}
10302
10303
/** Opcode 0x0f 0xcc. */
FNIEMOP_DEF(iemOp_bswap_rSP_r12)
{
    IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
    IEMOP_HLP_MIN_486(); /* bswap was introduced with the 486 (same for the three below). */
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
}


/** Opcode 0x0f 0xcd. */
FNIEMOP_DEF(iemOp_bswap_rBP_r13)
{
    IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
}


/** Opcode 0x0f 0xce. */
FNIEMOP_DEF(iemOp_bswap_rSI_r14)
{
    IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
}


/** Opcode 0x0f 0xcf. */
FNIEMOP_DEF(iemOp_bswap_rDI_r15)
{
    IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
}
10338
10339
/* Opcode 0x0f 0xd0 - invalid */


/** Opcode 0x66 0x0f 0xd0 - addsubpd Vpd, Wpd */
FNIEMOP_DEF(iemOp_addsubpd_Vpd_Wpd)
{
    /* Defers to the common SSE3 floating-point full,full->full worker. */
    IEMOP_MNEMONIC2(RM, ADDSUBPD, addsubpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_addsubpd_u128);
}


/* Opcode 0xf3 0x0f 0xd0 - invalid */


/** Opcode 0xf2 0x0f 0xd0 - addsubps Vps, Wps */
FNIEMOP_DEF(iemOp_addsubps_Vps_Wps)
{
    /* Defers to the common SSE3 floating-point full,full->full worker. */
    IEMOP_MNEMONIC2(RM, ADDSUBPS, addsubps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_addsubps_u128);
}
10360
10361
10362
10363/** Opcode 0x0f 0xd1 - psrlw Pq, Qq */
10364FNIEMOP_DEF(iemOp_psrlw_Pq_Qq)
10365{
10366 IEMOP_MNEMONIC2(RM, PSRLW, psrlw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
10367 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrlw_u64);
10368}
10369
/** Opcode 0x66 0x0f 0xd1 - psrlw Vx, Wx */
FNIEMOP_DEF(iemOp_psrlw_Vx_Wx)
{
    /* 128-bit SSE2 form; defers to the common SSE2 full,full->full worker. */
    IEMOP_MNEMONIC2(RM, PSRLW, psrlw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrlw_u128);
}

/* Opcode 0xf3 0x0f 0xd1 - invalid */
/* Opcode 0xf2 0x0f 0xd1 - invalid */

/** Opcode 0x0f 0xd2 - psrld Pq, Qq (64-bit MMX form) */
FNIEMOP_DEF(iemOp_psrld_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSRLD, psrld, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrld_u64);
}


/** Opcode 0x66 0x0f 0xd2 - psrld Vx, Wx (128-bit SSE2 form) */
FNIEMOP_DEF(iemOp_psrld_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSRLD, psrld, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrld_u128);
}


/* Opcode 0xf3 0x0f 0xd2 - invalid */
/* Opcode 0xf2 0x0f 0xd2 - invalid */

/** Opcode 0x0f 0xd3 - psrlq Pq, Qq (64-bit MMX form) */
FNIEMOP_DEF(iemOp_psrlq_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSRLQ, psrlq, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrlq_u64);
}


/** Opcode 0x66 0x0f 0xd3 - psrlq Vx, Wx (128-bit SSE2 form) */
FNIEMOP_DEF(iemOp_psrlq_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSRLQ, psrlq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrlq_u128);
}


/* Opcode 0xf3 0x0f 0xd3 - invalid */
/* Opcode 0xf2 0x0f 0xd3 - invalid */


/** Opcode 0x0f 0xd4 - paddq Pq, Qq */
FNIEMOP_DEF(iemOp_paddq_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PADDQ, paddq, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* The _Ex worker takes an extra feature flag: the MMX paddq form requires SSE2. */
    return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex, iemAImpl_paddq_u64, IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2);
}


/** Opcode 0x66 0x0f 0xd4 - paddq Vx, Wx */
FNIEMOP_DEF(iemOp_paddq_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PADDQ, paddq, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddq_u128);
}


/* Opcode 0xf3 0x0f 0xd4 - invalid */
/* Opcode 0xf2 0x0f 0xd4 - invalid */

/** Opcode 0x0f 0xd5 - pmullw Pq, Qq */
FNIEMOP_DEF(iemOp_pmullw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PMULLW, pmullw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmullw_u64);
}

/** Opcode 0x66 0x0f 0xd5 - pmullw Vx, Wx */
FNIEMOP_DEF(iemOp_pmullw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMULLW, pmullw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmullw_u128);
}
10454/* Opcode 0xf2 0x0f 0xd5 - invalid */
10455
/* Opcode 0x0f 0xd6 - invalid */

/**
 * @opcode      0xd6
 * @oppfx       0x66
 * @opcpuid     sse2
 * @opgroup     og_sse2_pcksclr_datamove
 * @opxcpttype  none
 * @optest      op1=-1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 *
 * Stores the low quadword of the source XMM register either into another XMM
 * register (zero-extending to 128 bits) or to memory.
 */
FNIEMOP_DEF(iemOp_movq_Wq_Vq)
{
    IEMOP_MNEMONIC2(MR, MOVQ, movq, WqZxReg_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE(); /* destination XMM is modified. */

        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_RM(pVCpu, bRm), uSrc); /* zero-extends into the full 128 bits. */

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ(); /* only reading SSE state here. */

        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
10511
10512
/**
 * @opcode      0xd6
 * @opcodesub   11 mr/reg
 * @oppfx       f3
 * @opcpuid     sse2
 * @opgroup     og_sse2_simdint_datamove
 * @optest      op1=1 op2=2 -> op1=2 ftw=0xff
 * @optest      op1=0 op2=-42 -> op1=-42 ftw=0xff
 *
 * Moves a quadword from an MMX register into the low half of an XMM
 * register, zero-extending the upper half.  Register form only; the memory
 * form of this encoding is invalid.
 */
FNIEMOP_DEF(iemOp_movq2dq_Vdq_Nq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_MNEMONIC2(RM_REG, MOVQ2DQ, movq2dq, VqZx_WO, Nq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE(); /* touches MMX/FPU state as well as SSE. */

        IEM_MC_FETCH_MREG_U64(uSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
        IEM_MC_FPU_TO_MMX_MODE(); /* like all MMX-reg accesses, switches the FPU unit to MMX mode. */

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic  udf30fd6mem
     * @opcode      0xd6
     * @opcodesub   !11 mr/reg
     * @oppfx       f3
     * @opunused    intel-modrm
     * @opcpuid     sse
     * @optest      ->
     */
    return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
}
10559
10560
/**
 * @opcode      0xd6
 * @opcodesub   11 mr/reg
 * @oppfx       f2
 * @opcpuid     sse2
 * @opgroup     og_sse2_simdint_datamove
 * @optest      op1=1 op2=2 -> op1=2 ftw=0xff
 * @optest      op1=0 op2=-42 -> op1=-42 ftw=0xff
 * @optest      op1=0 op2=0x1123456789abcdef -> op1=0x1123456789abcdef ftw=0xff
 * @optest      op1=0 op2=0xfedcba9876543210 -> op1=0xfedcba9876543210 ftw=0xff
 * @optest      op1=-42 op2=0xfedcba9876543210
 *                  -> op1=0xfedcba9876543210 ftw=0xff
 *
 * Moves the low quadword of an XMM register into an MMX register.  Register
 * form only; the memory form of this encoding is invalid.
 */
FNIEMOP_DEF(iemOp_movdq2q_Pq_Uq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_MNEMONIC2(RM_REG, MOVDQ2Q, movdq2q, Pq_WO, Uq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE(); /* touches MMX/FPU state as well as SSE. */

        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), uSrc);
        IEM_MC_FPU_TO_MMX_MODE(); /* like all MMX-reg accesses, switches the FPU unit to MMX mode. */

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic  udf20fd6mem
     * @opcode      0xd6
     * @opcodesub   !11 mr/reg
     * @oppfx       f2
     * @opunused    intel-modrm
     * @opcpuid     sse
     * @optest      ->
     */
    return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
}
10611
10612
/** Opcode 0x0f 0xd7 - pmovmskb Gd, Nq
 *
 * Builds a byte mask from an MMX register into a general-purpose register.
 * Register form only; the memory form raises an invalid-opcode exception. */
FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Docs says register only. */
    if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
    {
        /* Note! Taking the lazy approch here wrt the high 32-bits of the GREG. */
        IEMOP_MNEMONIC2(RM_REG, PMOVMSKB, pmovmskb, Gd, Nq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *,              puDst, 0);
        IEM_MC_ARG(uint64_t const *,        puSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT(); /* needs SSE or the AMD MMX extensions. */
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u64, puDst, puSrc);
        IEM_MC_FPU_TO_MMX_MODE();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}
10638
10639
/** Opcode 0x66 0x0f 0xd7 - pmovmskb Gd, Ux
 *
 * SSE2 variant of pmovmskb: builds a byte mask from an XMM register into a
 * general-purpose register.  Register form only; the memory form raises an
 * invalid-opcode exception. */
FNIEMOP_DEF(iemOp_pmovmskb_Gd_Ux)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Docs says register only. */
    if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
    {
        /* Note! Taking the lazy approch here wrt the high 32-bits of the GREG. */
        IEMOP_MNEMONIC2(RM_REG, PMOVMSKB, pmovmskb, Gd, Ux, DISOPTYPE_SSE | DISOPTYPE_HARMLESS, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *,              puDst, 0);
        IEM_MC_ARG(PCRTUINT128U,            puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}
10664
10665
/* Opcode 0xf3 0x0f 0xd7 - invalid */
/* Opcode 0xf2 0x0f 0xd7 - invalid */


/*
 * The wrappers below are simple decoders: each establishes the mnemonic /
 * stats entry and defers to a common worker - MMX (64-bit, Pq/Qq forms) or
 * SSE2 (128-bit, Vx/Wx forms) - with the matching assembly helper.  The
 * MmxSse workers are the MMX forms that additionally require the SSE /
 * AMD MMX extensions.
 */

/** Opcode 0x0f 0xd8 - psubusb Pq, Qq */
FNIEMOP_DEF(iemOp_psubusb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBUSB, psubusb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubusb_u64);
}


/** Opcode 0x66 0x0f 0xd8 - psubusb Vx, Wx */
FNIEMOP_DEF(iemOp_psubusb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBUSB, psubusb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubusb_u128);
}


/* Opcode 0xf3 0x0f 0xd8 - invalid */
/* Opcode 0xf2 0x0f 0xd8 - invalid */

/** Opcode 0x0f 0xd9 - psubusw Pq, Qq */
FNIEMOP_DEF(iemOp_psubusw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBUSW, psubusw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubusw_u64);
}


/** Opcode 0x66 0x0f 0xd9 - psubusw Vx, Wx */
FNIEMOP_DEF(iemOp_psubusw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBUSW, psubusw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubusw_u128);
}


/* Opcode 0xf3 0x0f 0xd9 - invalid */
/* Opcode 0xf2 0x0f 0xd9 - invalid */

/** Opcode 0x0f 0xda - pminub Pq, Qq */
FNIEMOP_DEF(iemOp_pminub_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PMINUB, pminub, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pminub_u64);
}


/** Opcode 0x66 0x0f 0xda - pminub Vx, Wx */
FNIEMOP_DEF(iemOp_pminub_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMINUB, pminub, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pminub_u128);
}

/* Opcode 0xf3 0x0f 0xda - invalid */
/* Opcode 0xf2 0x0f 0xda - invalid */

/** Opcode 0x0f 0xdb - pand Pq, Qq */
FNIEMOP_DEF(iemOp_pand_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PAND, pand, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pand_u64);
}


/** Opcode 0x66 0x0f 0xdb - pand Vx, Wx */
FNIEMOP_DEF(iemOp_pand_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PAND, pand, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pand_u128);
}


/* Opcode 0xf3 0x0f 0xdb - invalid */
/* Opcode 0xf2 0x0f 0xdb - invalid */

/** Opcode 0x0f 0xdc - paddusb Pq, Qq */
FNIEMOP_DEF(iemOp_paddusb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PADDUSB, paddusb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddusb_u64);
}


/** Opcode 0x66 0x0f 0xdc - paddusb Vx, Wx */
FNIEMOP_DEF(iemOp_paddusb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PADDUSB, paddusb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddusb_u128);
}


/* Opcode 0xf3 0x0f 0xdc - invalid */
/* Opcode 0xf2 0x0f 0xdc - invalid */

/** Opcode 0x0f 0xdd - paddusw Pq, Qq */
FNIEMOP_DEF(iemOp_paddusw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PADDUSW, paddusw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddusw_u64);
}


/** Opcode 0x66 0x0f 0xdd - paddusw Vx, Wx */
FNIEMOP_DEF(iemOp_paddusw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PADDUSW, paddusw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddusw_u128);
}


/* Opcode 0xf3 0x0f 0xdd - invalid */
/* Opcode 0xf2 0x0f 0xdd - invalid */

/** Opcode 0x0f 0xde - pmaxub Pq, Qq */
FNIEMOP_DEF(iemOp_pmaxub_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PMAXUB, pmaxub, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pmaxub_u64);
}


/** Opcode 0x66 0x0f 0xde - pmaxub Vx, Wx */
FNIEMOP_DEF(iemOp_pmaxub_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMAXUB, pmaxub, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmaxub_u128);
}

/* Opcode 0xf3 0x0f 0xde - invalid */
/* Opcode 0xf2 0x0f 0xde - invalid */
10800
10801
/** Opcode 0x0f 0xdf - pandn Pq, Qq */
FNIEMOP_DEF(iemOp_pandn_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PANDN, pandn, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pandn_u64);
}


/** Opcode 0x66 0x0f 0xdf - pandn Vx, Wx */
FNIEMOP_DEF(iemOp_pandn_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PANDN, pandn, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pandn_u128);
}


/* Opcode 0xf3 0x0f 0xdf - invalid */
/* Opcode 0xf2 0x0f 0xdf - invalid */

/** Opcode 0x0f 0xe0 - pavgb Pq, Qq (MMX form, requires SSE/MMXEXT - see worker) */
FNIEMOP_DEF(iemOp_pavgb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PAVGB, pavgb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pavgb_u64);
}


/** Opcode 0x66 0x0f 0xe0 - pavgb Vx, Wx */
FNIEMOP_DEF(iemOp_pavgb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PAVGB, pavgb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pavgb_u128);
}


/* Opcode 0xf3 0x0f 0xe0 - invalid */
/* Opcode 0xf2 0x0f 0xe0 - invalid */

/** Opcode 0x0f 0xe1 - psraw Pq, Qq */
FNIEMOP_DEF(iemOp_psraw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSRAW, psraw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psraw_u64);
}


/** Opcode 0x66 0x0f 0xe1 - psraw Vx, Wx */
FNIEMOP_DEF(iemOp_psraw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSRAW, psraw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psraw_u128);
}


/* Opcode 0xf3 0x0f 0xe1 - invalid */
/* Opcode 0xf2 0x0f 0xe1 - invalid */

/** Opcode 0x0f 0xe2 - psrad Pq, Qq */
FNIEMOP_DEF(iemOp_psrad_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSRAD, psrad, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrad_u64);
}


/** Opcode 0x66 0x0f 0xe2 - psrad Vx, Wx */
FNIEMOP_DEF(iemOp_psrad_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSRAD, psrad, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrad_u128);
}


/* Opcode 0xf3 0x0f 0xe2 - invalid */
/* Opcode 0xf2 0x0f 0xe2 - invalid */

/** Opcode 0x0f 0xe3 - pavgw Pq, Qq (MMX form, requires SSE/MMXEXT - see worker) */
FNIEMOP_DEF(iemOp_pavgw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PAVGW, pavgw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pavgw_u64);
}


/** Opcode 0x66 0x0f 0xe3 - pavgw Vx, Wx */
FNIEMOP_DEF(iemOp_pavgw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PAVGW, pavgw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pavgw_u128);
}


/* Opcode 0xf3 0x0f 0xe3 - invalid */
/* Opcode 0xf2 0x0f 0xe3 - invalid */

/** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq (MMX form, requires SSE/MMXEXT - see worker) */
FNIEMOP_DEF(iemOp_pmulhuw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PMULHUW, pmulhuw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pmulhuw_u64);
}


/** Opcode 0x66 0x0f 0xe4 - pmulhuw Vx, Wx */
FNIEMOP_DEF(iemOp_pmulhuw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMULHUW, pmulhuw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pmulhuw_u128);
}


/* Opcode 0xf3 0x0f 0xe4 - invalid */
/* Opcode 0xf2 0x0f 0xe4 - invalid */

/** Opcode 0x0f 0xe5 - pmulhw Pq, Qq */
FNIEMOP_DEF(iemOp_pmulhw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PMULHW, pmulhw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmulhw_u64);
}


/** Opcode 0x66 0x0f 0xe5 - pmulhw Vx, Wx */
FNIEMOP_DEF(iemOp_pmulhw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMULHW, pmulhw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmulhw_u128);
}


/* Opcode 0xf3 0x0f 0xe5 - invalid */
/* Opcode 0xf2 0x0f 0xe5 - invalid */

/* Opcode 0x0f 0xe6 - invalid */
/** Opcode 0x66 0x0f 0xe6 - cvttpd2dq Vx, Wpd (not implemented yet) */
FNIEMOP_STUB(iemOp_cvttpd2dq_Vx_Wpd);
/** Opcode 0xf3 0x0f 0xe6 - cvtdq2pd Vx, Wpd (not implemented yet) */
FNIEMOP_STUB(iemOp_cvtdq2pd_Vx_Wpd);
/** Opcode 0xf2 0x0f 0xe6 - cvtpd2dq Vx, Wpd (not implemented yet) */
FNIEMOP_STUB(iemOp_cvtpd2dq_Vx_Wpd);
10942
10943
/**
 * @opcode      0xe7
 * @opcodesub   !11 mr/reg
 * @oppfx       none
 * @opcpuid     sse
 * @opgroup     og_sse1_cachect
 * @opxcpttype  none
 * @optest      op1=-1 op2=2 -> op1=2 ftw=0xff
 * @optest      op1=0 op2=-42 -> op1=-42 ftw=0xff
 *
 * Non-temporal store of an MMX register to memory.  Memory form only; the
 * register form raises an invalid-opcode exception.
 */
FNIEMOP_DEF(iemOp_movntq_Mq_Pq)
{
    IEMOP_MNEMONIC2(MR_MEM, MOVNTQ, movntq, Mq_WO, Pq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        /* Register, memory. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MREG_U64(uSrc, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
        IEM_MC_FPU_TO_MMX_MODE(); /* like all MMX-reg accesses, switches the FPU unit to MMX mode. */

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }
    /**
     * @opdone
     * @opmnemonic  ud0fe7reg
     * @opcode      0xe7
     * @opcodesub   11 mr/reg
     * @oppfx       none
     * @opunused    immediate
     * @opcpuid     sse
     * @optest      ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}
10990
/**
 * @opcode      0xe7
 * @opcodesub   !11 mr/reg
 * @oppfx       0x66
 * @opcpuid     sse2
 * @opgroup     og_sse2_cachect
 * @opxcpttype  1
 * @optest      op1=-1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 *
 * Non-temporal store of an XMM register to memory (aligned).  Memory form
 * only; the register form raises an invalid-opcode exception.
 */
FNIEMOP_DEF(iemOp_movntdq_Mdq_Vdq)
{
    IEMOP_MNEMONIC2(MR_MEM, MOVNTDQ, movntdq, Mdq_WO, Vdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        /* Register, memory. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc); /* alignment-checked store. */

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic  ud660fe7reg
     * @opcode      0xe7
     * @opcodesub   11 mr/reg
     * @oppfx       0x66
     * @opunused    immediate
     * @opcpuid     sse
     * @optest      ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}
11037
11038/* Opcode 0xf3 0x0f 0xe7 - invalid */
11039/* Opcode 0xf2 0x0f 0xe7 - invalid */
11040
11041
11042/** Opcode 0x0f 0xe8 - psubsb Pq, Qq */
11043FNIEMOP_DEF(iemOp_psubsb_Pq_Qq)
11044{
11045 IEMOP_MNEMONIC2(RM, PSUBSB, psubsb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
11046 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubsb_u64);
11047}
11048
11049
11050/** Opcode 0x66 0x0f 0xe8 - psubsb Vx, Wx */
11051FNIEMOP_DEF(iemOp_psubsb_Vx_Wx)
11052{
11053 IEMOP_MNEMONIC2(RM, PSUBSB, psubsb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
11054 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubsb_u128);
11055}
11056
11057
11058/* Opcode 0xf3 0x0f 0xe8 - invalid */
11059/* Opcode 0xf2 0x0f 0xe8 - invalid */
11060
11061/** Opcode 0x0f 0xe9 - psubsw Pq, Qq */
11062FNIEMOP_DEF(iemOp_psubsw_Pq_Qq)
11063{
11064 IEMOP_MNEMONIC2(RM, PSUBSW, psubsw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
11065 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubsw_u64);
11066}
11067
11068
11069/** Opcode 0x66 0x0f 0xe9 - psubsw Vx, Wx */
11070FNIEMOP_DEF(iemOp_psubsw_Vx_Wx)
11071{
11072 IEMOP_MNEMONIC2(RM, PSUBSW, psubsw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
11073 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubsw_u128);
11074}
11075
11076
11077/* Opcode 0xf3 0x0f 0xe9 - invalid */
11078/* Opcode 0xf2 0x0f 0xe9 - invalid */
11079
11080
11081/** Opcode 0x0f 0xea - pminsw Pq, Qq */
11082FNIEMOP_DEF(iemOp_pminsw_Pq_Qq)
11083{
11084 IEMOP_MNEMONIC2(RM, PMINSW, pminsw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
11085 return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pminsw_u64);
11086}
11087
11088
11089/** Opcode 0x66 0x0f 0xea - pminsw Vx, Wx */
11090FNIEMOP_DEF(iemOp_pminsw_Vx_Wx)
11091{
11092 IEMOP_MNEMONIC2(RM, PMINSW, pminsw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
11093 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pminsw_u128);
11094}
11095
11096
11097/* Opcode 0xf3 0x0f 0xea - invalid */
11098/* Opcode 0xf2 0x0f 0xea - invalid */
11099
11100
/** Opcode 0x0f 0xeb - por Pq, Qq
 * Bitwise OR of the full 64-bit MMX operands. */
FNIEMOP_DEF(iemOp_por_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, POR, por, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_por_u64);
}


/** Opcode 0x66 0x0f 0xeb - por Vx, Wx
 * Bitwise OR of the full 128-bit SSE2 operands. */
FNIEMOP_DEF(iemOp_por_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, POR, por, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_por_u128);
}


/* Opcode 0xf3 0x0f 0xeb - invalid */
/* Opcode 0xf2 0x0f 0xeb - invalid */
11119
/** Opcode 0x0f 0xec - paddsb Pq, Qq
 * Packed add of signed bytes with signed saturation, MMX form. */
FNIEMOP_DEF(iemOp_paddsb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PADDSB, paddsb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddsb_u64);
}


/** Opcode 0x66 0x0f 0xec - paddsb Vx, Wx
 * Packed add of signed bytes with signed saturation, SSE2 (XMM) form. */
FNIEMOP_DEF(iemOp_paddsb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PADDSB, paddsb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddsb_u128);
}


/* Opcode 0xf3 0x0f 0xec - invalid */
/* Opcode 0xf2 0x0f 0xec - invalid */
11138
/** Opcode 0x0f 0xed - paddsw Pq, Qq
 * Packed add of signed words with signed saturation, MMX form. */
FNIEMOP_DEF(iemOp_paddsw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PADDSW, paddsw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddsw_u64);
}


/** Opcode 0x66 0x0f 0xed - paddsw Vx, Wx
 * Packed add of signed words with signed saturation, SSE2 (XMM) form. */
FNIEMOP_DEF(iemOp_paddsw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PADDSW, paddsw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddsw_u128);
}


/* Opcode 0xf3 0x0f 0xed - invalid */
/* Opcode 0xf2 0x0f 0xed - invalid */
11157
11158
/** Opcode 0x0f 0xee - pmaxsw Pq, Qq
 * Packed maximum of signed words, MMX form.
 * @note Dispatched via the MmxSse worker — presumably because the MMX form of
 *       PMAXSW arrived with the SSE extensions; confirm against the worker. */
FNIEMOP_DEF(iemOp_pmaxsw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PMAXSW, pmaxsw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pmaxsw_u64);
}


/** Opcode 0x66 0x0f 0xee - pmaxsw Vx, Wx
 * Packed maximum of signed words, SSE2 (XMM) form. */
FNIEMOP_DEF(iemOp_pmaxsw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMAXSW, pmaxsw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmaxsw_u128);
}


/* Opcode 0xf3 0x0f 0xee - invalid */
/* Opcode 0xf2 0x0f 0xee - invalid */
11177
11178
/** Opcode 0x0f 0xef - pxor Pq, Qq
 * Bitwise XOR of the full 64-bit MMX operands. */
FNIEMOP_DEF(iemOp_pxor_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PXOR, pxor, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pxor_u64);
}


/** Opcode 0x66 0x0f 0xef - pxor Vx, Wx
 * Bitwise XOR of the full 128-bit SSE2 operands. */
FNIEMOP_DEF(iemOp_pxor_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PXOR, pxor, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pxor_u128);
}


/* Opcode 0xf3 0x0f 0xef - invalid */
/* Opcode 0xf2 0x0f 0xef - invalid */
11197
/* Opcode 0x0f 0xf0 - invalid */
/* Opcode 0x66 0x0f 0xf0 - invalid */
/** Opcode 0xf2 0x0f 0xf0 - lddqu Vx, Mx
 * Not implemented yet — FNIEMOP_STUB placeholder. */
FNIEMOP_STUB(iemOp_lddqu_Vx_Mx);
11203
/** Opcode 0x0f 0xf1 - psllw Pq, Qq
 * Packed shift left logical of words by the source operand, MMX form. */
FNIEMOP_DEF(iemOp_psllw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSLLW, psllw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    /* Uses the "Opt" worker variant, unlike most neighbours here. */
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psllw_u64);
}


/** Opcode 0x66 0x0f 0xf1 - psllw Vx, Wx
 * Packed shift left logical of words by the source operand, SSE2 (XMM) form. */
FNIEMOP_DEF(iemOp_psllw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSLLW, psllw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psllw_u128);
}


/* Opcode 0xf2 0x0f 0xf1 - invalid */
11221
/** Opcode 0x0f 0xf2 - pslld Pq, Qq
 * Packed shift left logical of doublewords by the source operand, MMX form. */
FNIEMOP_DEF(iemOp_pslld_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSLLD, pslld, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pslld_u64);
}


/** Opcode 0x66 0x0f 0xf2 - pslld Vx, Wx
 * Packed shift left logical of doublewords by the source operand, SSE2 form. */
FNIEMOP_DEF(iemOp_pslld_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSLLD, pslld, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pslld_u128);
}


/* Opcode 0xf2 0x0f 0xf2 - invalid */
11239
/** Opcode 0x0f 0xf3 - psllq Pq, Qq
 * Packed shift left logical of quadwords by the source operand, MMX form. */
FNIEMOP_DEF(iemOp_psllq_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSLLQ, psllq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psllq_u64);
}


/** Opcode 0x66 0x0f 0xf3 - psllq Vx, Wx
 * Packed shift left logical of quadwords by the source operand, SSE2 form. */
FNIEMOP_DEF(iemOp_psllq_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSLLQ, psllq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psllq_u128);
}

/* Opcode 0xf2 0x0f 0xf3 - invalid */
11256
11257/** Opcode 0x0f 0xf4 - pmuludq Pq, Qq */
11258FNIEMOP_DEF(iemOp_pmuludq_Pq_Qq)
11259{
11260 IEMOP_MNEMONIC2(RM, PMULUDQ, pmuludq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
11261 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmuludq_u64);
11262}
11263
11264
11265/** Opcode 0x66 0x0f 0xf4 - pmuludq Vx, W */
11266FNIEMOP_DEF(iemOp_pmuludq_Vx_Wx)
11267{
11268 IEMOP_MNEMONIC2(RM, PMULUDQ, pmuludq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
11269 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmuludq_u128);
11270}
11271
11272
11273/* Opcode 0xf2 0x0f 0xf4 - invalid */
11274
/** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq
 * Multiply packed signed words and add adjacent products, MMX form. */
FNIEMOP_DEF(iemOp_pmaddwd_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PMADDWD, pmaddwd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmaddwd_u64);
}


/** Opcode 0x66 0x0f 0xf5 - pmaddwd Vx, Wx
 * Multiply packed signed words and add adjacent products, SSE2 (XMM) form. */
FNIEMOP_DEF(iemOp_pmaddwd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMADDWD, pmaddwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmaddwd_u128);
}

/* Opcode 0xf2 0x0f 0xf5 - invalid */
11291
/** Opcode 0x0f 0xf6 - psadbw Pq, Qq
 * Sum of absolute differences of packed unsigned bytes, MMX form.
 * @note Dispatched via the MmxSseOpt worker — presumably because the MMX form
 *       of PSADBW arrived with the SSE extensions; confirm against the worker. */
FNIEMOP_DEF(iemOp_psadbw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSADBW, psadbw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_psadbw_u64);
}


/** Opcode 0x66 0x0f 0xf6 - psadbw Vx, Wx
 * Sum of absolute differences of packed unsigned bytes, SSE2 (XMM) form. */
FNIEMOP_DEF(iemOp_psadbw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSADBW, psadbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psadbw_u128);
}


/* Opcode 0xf2 0x0f 0xf6 - invalid */
11309
/** Opcode 0x0f 0xf7 - maskmovq Pq, Nq
 * Not implemented yet — FNIEMOP_STUB placeholder. */
FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq);
/** Opcode 0x66 0x0f 0xf7 - maskmovdqu Vdq, Udq
 * Not implemented yet — FNIEMOP_STUB placeholder. */
FNIEMOP_STUB(iemOp_maskmovdqu_Vdq_Udq);
/* Opcode 0xf2 0x0f 0xf7 - invalid */
11315
11316
/** Opcode 0x0f 0xf8 - psubb Pq, Qq
 * Packed subtract of bytes (wrap-around), MMX form. */
FNIEMOP_DEF(iemOp_psubb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBB, psubb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubb_u64);
}


/** Opcode 0x66 0x0f 0xf8 - psubb Vx, Wx
 * Packed subtract of bytes (wrap-around), SSE2 (XMM) form. */
FNIEMOP_DEF(iemOp_psubb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBB, psubb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubb_u128);
}


/* Opcode 0xf2 0x0f 0xf8 - invalid */
11334
11335
/** Opcode 0x0f 0xf9 - psubw Pq, Qq
 * Packed subtract of words (wrap-around), MMX form. */
FNIEMOP_DEF(iemOp_psubw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBW, psubw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubw_u64);
}


/** Opcode 0x66 0x0f 0xf9 - psubw Vx, Wx
 * Packed subtract of words (wrap-around), SSE2 (XMM) form. */
FNIEMOP_DEF(iemOp_psubw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBW, psubw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubw_u128);
}


/* Opcode 0xf2 0x0f 0xf9 - invalid */
11353
11354
/** Opcode 0x0f 0xfa - psubd Pq, Qq
 * Packed subtract of doublewords (wrap-around), MMX form. */
FNIEMOP_DEF(iemOp_psubd_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBD, psubd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubd_u64);
}


/** Opcode 0x66 0x0f 0xfa - psubd Vx, Wx
 * Packed subtract of doublewords (wrap-around), SSE2 (XMM) form. */
FNIEMOP_DEF(iemOp_psubd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBD, psubd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubd_u128);
}


/* Opcode 0xf2 0x0f 0xfa - invalid */
11372
11373
/** Opcode 0x0f 0xfb - psubq Pq, Qq
 * Packed subtract of quadwords, MMX form.
 * The MMX encoding of PSUBQ was introduced with SSE2, hence the explicit
 * fSse2 gate passed to the _Ex worker. */
FNIEMOP_DEF(iemOp_psubq_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBQ, psubq, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex, iemAImpl_psubq_u64, IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2);
}


/** Opcode 0x66 0x0f 0xfb - psubq Vx, Wx
 * Packed subtract of quadwords, SSE2 (XMM) form. */
FNIEMOP_DEF(iemOp_psubq_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBQ, psubq, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubq_u128);
}


/* Opcode 0xf2 0x0f 0xfb - invalid */
11391
11392
/** Opcode 0x0f 0xfc - paddb Pq, Qq
 * Packed add of bytes (wrap-around), MMX form. */
FNIEMOP_DEF(iemOp_paddb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PADDB, paddb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddb_u64);
}


/** Opcode 0x66 0x0f 0xfc - paddb Vx, Wx
 * Packed add of bytes (wrap-around), SSE2 (XMM) form. */
FNIEMOP_DEF(iemOp_paddb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PADDB, paddb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddb_u128);
}


/* Opcode 0xf2 0x0f 0xfc - invalid */
11410
11411
/** Opcode 0x0f 0xfd - paddw Pq, Qq
 * Packed add of words (wrap-around), MMX form. */
FNIEMOP_DEF(iemOp_paddw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PADDW, paddw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddw_u64);
}


/** Opcode 0x66 0x0f 0xfd - paddw Vx, Wx
 * Packed add of words (wrap-around), SSE2 (XMM) form. */
FNIEMOP_DEF(iemOp_paddw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PADDW, paddw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddw_u128);
}


/* Opcode 0xf2 0x0f 0xfd - invalid */
11429
11430
/** Opcode 0x0f 0xfe - paddd Pq, Qq
 * Packed add of doublewords (wrap-around), MMX form. */
FNIEMOP_DEF(iemOp_paddd_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PADDD, paddd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddd_u64);
}


/** Opcode 0x66 0x0f 0xfe - paddd Vx, Wx
 * Packed add of doublewords (wrap-around), SSE2 (XMM) form. */
FNIEMOP_DEF(iemOp_paddd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PADDD, paddd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddd_u128);
}


/* Opcode 0xf2 0x0f 0xfe - invalid */
11448
11449
/** Opcode **** 0x0f 0xff - UD0
 *
 * Guaranteed-undefined opcode; always raises \#UD.  The twist is decode
 * length: on Intel CPUs a ModR/M byte (and any effective-address bytes it
 * implies) is consumed before the fault is raised, while on other vendors
 * the exception is raised with no further decoding.
 */
FNIEMOP_DEF(iemOp_ud0)
{
    IEMOP_MNEMONIC(ud0, "ud0");
    if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
    {
        uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
#ifndef TST_IEM_CHECK_MC
        /* For memory forms, decode (but discard) the effective address so the
           instruction length matches what Intel hardware reports. */
        if (IEM_IS_MODRM_MEM_MODE(bRm))
        {
            RTGCPTR      GCPtrEff;
            VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
            if (rcStrict != VINF_SUCCESS)
                return rcStrict;
        }
#endif
        IEMOP_HLP_DONE_DECODING();
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}
11470
11471
11472
/**
 * Two byte opcode map, first byte 0x0f.
 *
 * Indexed by (opcode << 2) | prefix-slot: each row holds the four handlers
 * for the no-prefix, 0x66, 0xf3 and 0xf2 encodings of that opcode.
 *
 * @remarks The g_apfnVexMap1 table is currently a subset of this one, so please
 *          check if it needs updating as well when making changes.
 */
IEM_STATIC const PFNIEMOP g_apfnTwoByteMap[] =
{
    /*          no prefix,                  066h prefix                 f3h prefix,                 f2h prefix */
    /* 0x00 */  IEMOP_X4(iemOp_Grp6),
    /* 0x01 */  IEMOP_X4(iemOp_Grp7),
    /* 0x02 */  IEMOP_X4(iemOp_lar_Gv_Ew),
    /* 0x03 */  IEMOP_X4(iemOp_lsl_Gv_Ew),
    /* 0x04 */  IEMOP_X4(iemOp_Invalid),
    /* 0x05 */  IEMOP_X4(iemOp_syscall),
    /* 0x06 */  IEMOP_X4(iemOp_clts),
    /* 0x07 */  IEMOP_X4(iemOp_sysret),
    /* 0x08 */  IEMOP_X4(iemOp_invd),
    /* 0x09 */  IEMOP_X4(iemOp_wbinvd),
    /* 0x0a */  IEMOP_X4(iemOp_Invalid),
    /* 0x0b */  IEMOP_X4(iemOp_ud2),
    /* 0x0c */  IEMOP_X4(iemOp_Invalid),
    /* 0x0d */  IEMOP_X4(iemOp_nop_Ev_GrpP),
    /* 0x0e */  IEMOP_X4(iemOp_femms),
    /* 0x0f */  IEMOP_X4(iemOp_3Dnow),

    /* 0x10 */  iemOp_movups_Vps_Wps,       iemOp_movupd_Vpd_Wpd,       iemOp_movss_Vss_Wss,        iemOp_movsd_Vsd_Wsd,
    /* 0x11 */  iemOp_movups_Wps_Vps,       iemOp_movupd_Wpd_Vpd,       iemOp_movss_Wss_Vss,        iemOp_movsd_Wsd_Vsd,
    /* 0x12 */  iemOp_movlps_Vq_Mq__movhlps, iemOp_movlpd_Vq_Mq,        iemOp_movsldup_Vdq_Wdq,     iemOp_movddup_Vdq_Wdq,
    /* 0x13 */  iemOp_movlps_Mq_Vq,         iemOp_movlpd_Mq_Vq,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x14 */  iemOp_unpcklps_Vx_Wx,       iemOp_unpcklpd_Vx_Wx,       iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x15 */  iemOp_unpckhps_Vx_Wx,       iemOp_unpckhpd_Vx_Wx,       iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x16 */  iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq, iemOp_movhpd_Vdq_Mq, iemOp_movshdup_Vdq_Wdq,   iemOp_InvalidNeedRM,
    /* 0x17 */  iemOp_movhps_Mq_Vq,         iemOp_movhpd_Mq_Vq,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x18 */  IEMOP_X4(iemOp_prefetch_Grp16),
    /* 0x19 */  IEMOP_X4(iemOp_nop_Ev),
    /* 0x1a */  IEMOP_X4(iemOp_nop_Ev),
    /* 0x1b */  IEMOP_X4(iemOp_nop_Ev),
    /* 0x1c */  IEMOP_X4(iemOp_nop_Ev),
    /* 0x1d */  IEMOP_X4(iemOp_nop_Ev),
    /* 0x1e */  IEMOP_X4(iemOp_nop_Ev),
    /* 0x1f */  IEMOP_X4(iemOp_nop_Ev),

    /* 0x20 */  iemOp_mov_Rd_Cd,            iemOp_mov_Rd_Cd,            iemOp_mov_Rd_Cd,            iemOp_mov_Rd_Cd,
    /* 0x21 */  iemOp_mov_Rd_Dd,            iemOp_mov_Rd_Dd,            iemOp_mov_Rd_Dd,            iemOp_mov_Rd_Dd,
    /* 0x22 */  iemOp_mov_Cd_Rd,            iemOp_mov_Cd_Rd,            iemOp_mov_Cd_Rd,            iemOp_mov_Cd_Rd,
    /* 0x23 */  iemOp_mov_Dd_Rd,            iemOp_mov_Dd_Rd,            iemOp_mov_Dd_Rd,            iemOp_mov_Dd_Rd,
    /* 0x24 */  iemOp_mov_Rd_Td,            iemOp_mov_Rd_Td,            iemOp_mov_Rd_Td,            iemOp_mov_Rd_Td,
    /* 0x25 */  iemOp_Invalid,              iemOp_Invalid,              iemOp_Invalid,              iemOp_Invalid,
    /* 0x26 */  iemOp_mov_Td_Rd,            iemOp_mov_Td_Rd,            iemOp_mov_Td_Rd,            iemOp_mov_Td_Rd,
    /* 0x27 */  iemOp_Invalid,              iemOp_Invalid,              iemOp_Invalid,              iemOp_Invalid,
    /* 0x28 */  iemOp_movaps_Vps_Wps,       iemOp_movapd_Vpd_Wpd,       iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x29 */  iemOp_movaps_Wps_Vps,       iemOp_movapd_Wpd_Vpd,       iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x2a */  iemOp_cvtpi2ps_Vps_Qpi,     iemOp_cvtpi2pd_Vpd_Qpi,     iemOp_cvtsi2ss_Vss_Ey,      iemOp_cvtsi2sd_Vsd_Ey,
    /* 0x2b */  iemOp_movntps_Mps_Vps,      iemOp_movntpd_Mpd_Vpd,      iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x2c */  iemOp_cvttps2pi_Ppi_Wps,    iemOp_cvttpd2pi_Ppi_Wpd,    iemOp_cvttss2si_Gy_Wss,     iemOp_cvttsd2si_Gy_Wsd,
    /* 0x2d */  iemOp_cvtps2pi_Ppi_Wps,     iemOp_cvtpd2pi_Qpi_Wpd,     iemOp_cvtss2si_Gy_Wss,      iemOp_cvtsd2si_Gy_Wsd,
    /* 0x2e */  iemOp_ucomiss_Vss_Wss,      iemOp_ucomisd_Vsd_Wsd,      iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x2f */  iemOp_comiss_Vss_Wss,       iemOp_comisd_Vsd_Wsd,       iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,

    /* 0x30 */  IEMOP_X4(iemOp_wrmsr),
    /* 0x31 */  IEMOP_X4(iemOp_rdtsc),
    /* 0x32 */  IEMOP_X4(iemOp_rdmsr),
    /* 0x33 */  IEMOP_X4(iemOp_rdpmc),
    /* 0x34 */  IEMOP_X4(iemOp_sysenter),
    /* 0x35 */  IEMOP_X4(iemOp_sysexit),
    /* 0x36 */  IEMOP_X4(iemOp_Invalid),
    /* 0x37 */  IEMOP_X4(iemOp_getsec),
    /* 0x38 */  IEMOP_X4(iemOp_3byte_Esc_0f_38),
    /* 0x39 */  IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3a */  IEMOP_X4(iemOp_3byte_Esc_0f_3a),
    /* 0x3b */  IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
    /* 0x3c */  IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3d */  IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3e */  IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
    /* 0x3f */  IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),

    /* 0x40 */  IEMOP_X4(iemOp_cmovo_Gv_Ev),
    /* 0x41 */  IEMOP_X4(iemOp_cmovno_Gv_Ev),
    /* 0x42 */  IEMOP_X4(iemOp_cmovc_Gv_Ev),
    /* 0x43 */  IEMOP_X4(iemOp_cmovnc_Gv_Ev),
    /* 0x44 */  IEMOP_X4(iemOp_cmove_Gv_Ev),
    /* 0x45 */  IEMOP_X4(iemOp_cmovne_Gv_Ev),
    /* 0x46 */  IEMOP_X4(iemOp_cmovbe_Gv_Ev),
    /* 0x47 */  IEMOP_X4(iemOp_cmovnbe_Gv_Ev),
    /* 0x48 */  IEMOP_X4(iemOp_cmovs_Gv_Ev),
    /* 0x49 */  IEMOP_X4(iemOp_cmovns_Gv_Ev),
    /* 0x4a */  IEMOP_X4(iemOp_cmovp_Gv_Ev),
    /* 0x4b */  IEMOP_X4(iemOp_cmovnp_Gv_Ev),
    /* 0x4c */  IEMOP_X4(iemOp_cmovl_Gv_Ev),
    /* 0x4d */  IEMOP_X4(iemOp_cmovnl_Gv_Ev),
    /* 0x4e */  IEMOP_X4(iemOp_cmovle_Gv_Ev),
    /* 0x4f */  IEMOP_X4(iemOp_cmovnle_Gv_Ev),

    /* 0x50 */  iemOp_movmskps_Gy_Ups,      iemOp_movmskpd_Gy_Upd,      iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x51 */  iemOp_sqrtps_Vps_Wps,       iemOp_sqrtpd_Vpd_Wpd,       iemOp_sqrtss_Vss_Wss,       iemOp_sqrtsd_Vsd_Wsd,
    /* 0x52 */  iemOp_rsqrtps_Vps_Wps,      iemOp_InvalidNeedRM,        iemOp_rsqrtss_Vss_Wss,      iemOp_InvalidNeedRM,
    /* 0x53 */  iemOp_rcpps_Vps_Wps,        iemOp_InvalidNeedRM,        iemOp_rcpss_Vss_Wss,        iemOp_InvalidNeedRM,
    /* 0x54 */  iemOp_andps_Vps_Wps,        iemOp_andpd_Vpd_Wpd,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x55 */  iemOp_andnps_Vps_Wps,       iemOp_andnpd_Vpd_Wpd,       iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x56 */  iemOp_orps_Vps_Wps,         iemOp_orpd_Vpd_Wpd,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x57 */  iemOp_xorps_Vps_Wps,        iemOp_xorpd_Vpd_Wpd,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x58 */  iemOp_addps_Vps_Wps,        iemOp_addpd_Vpd_Wpd,        iemOp_addss_Vss_Wss,        iemOp_addsd_Vsd_Wsd,
    /* 0x59 */  iemOp_mulps_Vps_Wps,        iemOp_mulpd_Vpd_Wpd,        iemOp_mulss_Vss_Wss,        iemOp_mulsd_Vsd_Wsd,
    /* 0x5a */  iemOp_cvtps2pd_Vpd_Wps,     iemOp_cvtpd2ps_Vps_Wpd,     iemOp_cvtss2sd_Vsd_Wss,     iemOp_cvtsd2ss_Vss_Wsd,
    /* 0x5b */  iemOp_cvtdq2ps_Vps_Wdq,     iemOp_cvtps2dq_Vdq_Wps,     iemOp_cvttps2dq_Vdq_Wps,    iemOp_InvalidNeedRM,
    /* 0x5c */  iemOp_subps_Vps_Wps,        iemOp_subpd_Vpd_Wpd,        iemOp_subss_Vss_Wss,        iemOp_subsd_Vsd_Wsd,
    /* 0x5d */  iemOp_minps_Vps_Wps,        iemOp_minpd_Vpd_Wpd,        iemOp_minss_Vss_Wss,        iemOp_minsd_Vsd_Wsd,
    /* 0x5e */  iemOp_divps_Vps_Wps,        iemOp_divpd_Vpd_Wpd,        iemOp_divss_Vss_Wss,        iemOp_divsd_Vsd_Wsd,
    /* 0x5f */  iemOp_maxps_Vps_Wps,        iemOp_maxpd_Vpd_Wpd,        iemOp_maxss_Vss_Wss,        iemOp_maxsd_Vsd_Wsd,

    /* 0x60 */  iemOp_punpcklbw_Pq_Qd,      iemOp_punpcklbw_Vx_Wx,      iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x61 */  iemOp_punpcklwd_Pq_Qd,      iemOp_punpcklwd_Vx_Wx,      iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x62 */  iemOp_punpckldq_Pq_Qd,      iemOp_punpckldq_Vx_Wx,      iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x63 */  iemOp_packsswb_Pq_Qq,       iemOp_packsswb_Vx_Wx,       iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x64 */  iemOp_pcmpgtb_Pq_Qq,        iemOp_pcmpgtb_Vx_Wx,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x65 */  iemOp_pcmpgtw_Pq_Qq,        iemOp_pcmpgtw_Vx_Wx,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x66 */  iemOp_pcmpgtd_Pq_Qq,        iemOp_pcmpgtd_Vx_Wx,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x67 */  iemOp_packuswb_Pq_Qq,       iemOp_packuswb_Vx_Wx,       iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x68 */  iemOp_punpckhbw_Pq_Qq,      iemOp_punpckhbw_Vx_Wx,      iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x69 */  iemOp_punpckhwd_Pq_Qq,      iemOp_punpckhwd_Vx_Wx,      iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x6a */  iemOp_punpckhdq_Pq_Qq,      iemOp_punpckhdq_Vx_Wx,      iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x6b */  iemOp_packssdw_Pq_Qd,       iemOp_packssdw_Vx_Wx,       iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x6c */  iemOp_InvalidNeedRM,        iemOp_punpcklqdq_Vx_Wx,     iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x6d */  iemOp_InvalidNeedRM,        iemOp_punpckhqdq_Vx_Wx,     iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x6e */  iemOp_movd_q_Pd_Ey,         iemOp_movd_q_Vy_Ey,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x6f */  iemOp_movq_Pq_Qq,           iemOp_movdqa_Vdq_Wdq,       iemOp_movdqu_Vdq_Wdq,       iemOp_InvalidNeedRM,

    /* 0x70 */  iemOp_pshufw_Pq_Qq_Ib,      iemOp_pshufd_Vx_Wx_Ib,      iemOp_pshufhw_Vx_Wx_Ib,     iemOp_pshuflw_Vx_Wx_Ib,
    /* 0x71 */  IEMOP_X4(iemOp_Grp12),
    /* 0x72 */  IEMOP_X4(iemOp_Grp13),
    /* 0x73 */  IEMOP_X4(iemOp_Grp14),
    /* 0x74 */  iemOp_pcmpeqb_Pq_Qq,        iemOp_pcmpeqb_Vx_Wx,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x75 */  iemOp_pcmpeqw_Pq_Qq,        iemOp_pcmpeqw_Vx_Wx,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x76 */  iemOp_pcmpeqd_Pq_Qq,        iemOp_pcmpeqd_Vx_Wx,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x77 */  iemOp_emms,                 iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,

    /* 0x78 */  iemOp_vmread_Ey_Gy,         iemOp_AmdGrp17,             iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x79 */  iemOp_vmwrite_Gy_Ey,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x7a */  iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x7b */  iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x7c */  iemOp_InvalidNeedRM,        iemOp_haddpd_Vpd_Wpd,       iemOp_InvalidNeedRM,        iemOp_haddps_Vps_Wps,
    /* 0x7d */  iemOp_InvalidNeedRM,        iemOp_hsubpd_Vpd_Wpd,       iemOp_InvalidNeedRM,        iemOp_hsubps_Vps_Wps,
    /* 0x7e */  iemOp_movd_q_Ey_Pd,         iemOp_movd_q_Ey_Vy,         iemOp_movq_Vq_Wq,           iemOp_InvalidNeedRM,
    /* 0x7f */  iemOp_movq_Qq_Pq,           iemOp_movdqa_Wx_Vx,         iemOp_movdqu_Wx_Vx,         iemOp_InvalidNeedRM,

    /* 0x80 */  IEMOP_X4(iemOp_jo_Jv),
    /* 0x81 */  IEMOP_X4(iemOp_jno_Jv),
    /* 0x82 */  IEMOP_X4(iemOp_jc_Jv),
    /* 0x83 */  IEMOP_X4(iemOp_jnc_Jv),
    /* 0x84 */  IEMOP_X4(iemOp_je_Jv),
    /* 0x85 */  IEMOP_X4(iemOp_jne_Jv),
    /* 0x86 */  IEMOP_X4(iemOp_jbe_Jv),
    /* 0x87 */  IEMOP_X4(iemOp_jnbe_Jv),
    /* 0x88 */  IEMOP_X4(iemOp_js_Jv),
    /* 0x89 */  IEMOP_X4(iemOp_jns_Jv),
    /* 0x8a */  IEMOP_X4(iemOp_jp_Jv),
    /* 0x8b */  IEMOP_X4(iemOp_jnp_Jv),
    /* 0x8c */  IEMOP_X4(iemOp_jl_Jv),
    /* 0x8d */  IEMOP_X4(iemOp_jnl_Jv),
    /* 0x8e */  IEMOP_X4(iemOp_jle_Jv),
    /* 0x8f */  IEMOP_X4(iemOp_jnle_Jv),

    /* 0x90 */  IEMOP_X4(iemOp_seto_Eb),
    /* 0x91 */  IEMOP_X4(iemOp_setno_Eb),
    /* 0x92 */  IEMOP_X4(iemOp_setc_Eb),
    /* 0x93 */  IEMOP_X4(iemOp_setnc_Eb),
    /* 0x94 */  IEMOP_X4(iemOp_sete_Eb),
    /* 0x95 */  IEMOP_X4(iemOp_setne_Eb),
    /* 0x96 */  IEMOP_X4(iemOp_setbe_Eb),
    /* 0x97 */  IEMOP_X4(iemOp_setnbe_Eb),
    /* 0x98 */  IEMOP_X4(iemOp_sets_Eb),
    /* 0x99 */  IEMOP_X4(iemOp_setns_Eb),
    /* 0x9a */  IEMOP_X4(iemOp_setp_Eb),
    /* 0x9b */  IEMOP_X4(iemOp_setnp_Eb),
    /* 0x9c */  IEMOP_X4(iemOp_setl_Eb),
    /* 0x9d */  IEMOP_X4(iemOp_setnl_Eb),
    /* 0x9e */  IEMOP_X4(iemOp_setle_Eb),
    /* 0x9f */  IEMOP_X4(iemOp_setnle_Eb),

    /* 0xa0 */  IEMOP_X4(iemOp_push_fs),
    /* 0xa1 */  IEMOP_X4(iemOp_pop_fs),
    /* 0xa2 */  IEMOP_X4(iemOp_cpuid),
    /* 0xa3 */  IEMOP_X4(iemOp_bt_Ev_Gv),
    /* 0xa4 */  IEMOP_X4(iemOp_shld_Ev_Gv_Ib),
    /* 0xa5 */  IEMOP_X4(iemOp_shld_Ev_Gv_CL),
    /* 0xa6 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa7 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa8 */  IEMOP_X4(iemOp_push_gs),
    /* 0xa9 */  IEMOP_X4(iemOp_pop_gs),
    /* 0xaa */  IEMOP_X4(iemOp_rsm),
    /* 0xab */  IEMOP_X4(iemOp_bts_Ev_Gv),
    /* 0xac */  IEMOP_X4(iemOp_shrd_Ev_Gv_Ib),
    /* 0xad */  IEMOP_X4(iemOp_shrd_Ev_Gv_CL),
    /* 0xae */  IEMOP_X4(iemOp_Grp15),
    /* 0xaf */  IEMOP_X4(iemOp_imul_Gv_Ev),

    /* 0xb0 */  IEMOP_X4(iemOp_cmpxchg_Eb_Gb),
    /* 0xb1 */  IEMOP_X4(iemOp_cmpxchg_Ev_Gv),
    /* 0xb2 */  IEMOP_X4(iemOp_lss_Gv_Mp),
    /* 0xb3 */  IEMOP_X4(iemOp_btr_Ev_Gv),
    /* 0xb4 */  IEMOP_X4(iemOp_lfs_Gv_Mp),
    /* 0xb5 */  IEMOP_X4(iemOp_lgs_Gv_Mp),
    /* 0xb6 */  IEMOP_X4(iemOp_movzx_Gv_Eb),
    /* 0xb7 */  IEMOP_X4(iemOp_movzx_Gv_Ew),
    /* 0xb8 */  iemOp_jmpe,                 iemOp_InvalidNeedRM,        iemOp_popcnt_Gv_Ev,         iemOp_InvalidNeedRM,
    /* 0xb9 */  IEMOP_X4(iemOp_Grp10),
    /* 0xba */  IEMOP_X4(iemOp_Grp8),
    /* 0xbb */  IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3?
    /* 0xbc */  iemOp_bsf_Gv_Ev,            iemOp_bsf_Gv_Ev,            iemOp_tzcnt_Gv_Ev,          iemOp_bsf_Gv_Ev,
    /* 0xbd */  iemOp_bsr_Gv_Ev,            iemOp_bsr_Gv_Ev,            iemOp_lzcnt_Gv_Ev,          iemOp_bsr_Gv_Ev,
    /* 0xbe */  IEMOP_X4(iemOp_movsx_Gv_Eb),
    /* 0xbf */  IEMOP_X4(iemOp_movsx_Gv_Ew),

    /* 0xc0 */  IEMOP_X4(iemOp_xadd_Eb_Gb),
    /* 0xc1 */  IEMOP_X4(iemOp_xadd_Ev_Gv),
    /* 0xc2 */  iemOp_cmpps_Vps_Wps_Ib,     iemOp_cmppd_Vpd_Wpd_Ib,     iemOp_cmpss_Vss_Wss_Ib,     iemOp_cmpsd_Vsd_Wsd_Ib,
    /* 0xc3 */  iemOp_movnti_My_Gy,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xc4 */  iemOp_pinsrw_Pq_RyMw_Ib,    iemOp_pinsrw_Vdq_RyMw_Ib,   iemOp_InvalidNeedRMImm8,    iemOp_InvalidNeedRMImm8,
    /* 0xc5 */  iemOp_pextrw_Gd_Nq_Ib,      iemOp_pextrw_Gd_Udq_Ib,     iemOp_InvalidNeedRMImm8,    iemOp_InvalidNeedRMImm8,
    /* 0xc6 */  iemOp_shufps_Vps_Wps_Ib,    iemOp_shufpd_Vpd_Wpd_Ib,    iemOp_InvalidNeedRMImm8,    iemOp_InvalidNeedRMImm8,
    /* 0xc7 */  IEMOP_X4(iemOp_Grp9),
    /* 0xc8 */  IEMOP_X4(iemOp_bswap_rAX_r8),
    /* 0xc9 */  IEMOP_X4(iemOp_bswap_rCX_r9),
    /* 0xca */  IEMOP_X4(iemOp_bswap_rDX_r10),
    /* 0xcb */  IEMOP_X4(iemOp_bswap_rBX_r11),
    /* 0xcc */  IEMOP_X4(iemOp_bswap_rSP_r12),
    /* 0xcd */  IEMOP_X4(iemOp_bswap_rBP_r13),
    /* 0xce */  IEMOP_X4(iemOp_bswap_rSI_r14),
    /* 0xcf */  IEMOP_X4(iemOp_bswap_rDI_r15),

    /* 0xd0 */  iemOp_InvalidNeedRM,        iemOp_addsubpd_Vpd_Wpd,     iemOp_InvalidNeedRM,        iemOp_addsubps_Vps_Wps,
    /* 0xd1 */  iemOp_psrlw_Pq_Qq,          iemOp_psrlw_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xd2 */  iemOp_psrld_Pq_Qq,          iemOp_psrld_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xd3 */  iemOp_psrlq_Pq_Qq,          iemOp_psrlq_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xd4 */  iemOp_paddq_Pq_Qq,          iemOp_paddq_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xd5 */  iemOp_pmullw_Pq_Qq,         iemOp_pmullw_Vx_Wx,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xd6 */  iemOp_InvalidNeedRM,        iemOp_movq_Wq_Vq,           iemOp_movq2dq_Vdq_Nq,       iemOp_movdq2q_Pq_Uq,
    /* 0xd7 */  iemOp_pmovmskb_Gd_Nq,       iemOp_pmovmskb_Gd_Ux,       iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xd8 */  iemOp_psubusb_Pq_Qq,        iemOp_psubusb_Vx_Wx,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xd9 */  iemOp_psubusw_Pq_Qq,        iemOp_psubusw_Vx_Wx,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xda */  iemOp_pminub_Pq_Qq,         iemOp_pminub_Vx_Wx,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xdb */  iemOp_pand_Pq_Qq,           iemOp_pand_Vx_Wx,           iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xdc */  iemOp_paddusb_Pq_Qq,        iemOp_paddusb_Vx_Wx,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xdd */  iemOp_paddusw_Pq_Qq,        iemOp_paddusw_Vx_Wx,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xde */  iemOp_pmaxub_Pq_Qq,         iemOp_pmaxub_Vx_Wx,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xdf */  iemOp_pandn_Pq_Qq,          iemOp_pandn_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,

    /* 0xe0 */  iemOp_pavgb_Pq_Qq,          iemOp_pavgb_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xe1 */  iemOp_psraw_Pq_Qq,          iemOp_psraw_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xe2 */  iemOp_psrad_Pq_Qq,          iemOp_psrad_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xe3 */  iemOp_pavgw_Pq_Qq,          iemOp_pavgw_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xe4 */  iemOp_pmulhuw_Pq_Qq,        iemOp_pmulhuw_Vx_Wx,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xe5 */  iemOp_pmulhw_Pq_Qq,         iemOp_pmulhw_Vx_Wx,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xe6 */  iemOp_InvalidNeedRM,        iemOp_cvttpd2dq_Vx_Wpd,     iemOp_cvtdq2pd_Vx_Wpd,      iemOp_cvtpd2dq_Vx_Wpd,
    /* 0xe7 */  iemOp_movntq_Mq_Pq,         iemOp_movntdq_Mdq_Vdq,      iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xe8 */  iemOp_psubsb_Pq_Qq,         iemOp_psubsb_Vx_Wx,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xe9 */  iemOp_psubsw_Pq_Qq,         iemOp_psubsw_Vx_Wx,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xea */  iemOp_pminsw_Pq_Qq,         iemOp_pminsw_Vx_Wx,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xeb */  iemOp_por_Pq_Qq,            iemOp_por_Vx_Wx,            iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xec */  iemOp_paddsb_Pq_Qq,         iemOp_paddsb_Vx_Wx,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xed */  iemOp_paddsw_Pq_Qq,         iemOp_paddsw_Vx_Wx,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xee */  iemOp_pmaxsw_Pq_Qq,         iemOp_pmaxsw_Vx_Wx,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xef */  iemOp_pxor_Pq_Qq,           iemOp_pxor_Vx_Wx,           iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,

    /* 0xf0 */  iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,        iemOp_lddqu_Vx_Mx,
    /* 0xf1 */  iemOp_psllw_Pq_Qq,          iemOp_psllw_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xf2 */  iemOp_pslld_Pq_Qq,          iemOp_pslld_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xf3 */  iemOp_psllq_Pq_Qq,          iemOp_psllq_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xf4 */  iemOp_pmuludq_Pq_Qq,        iemOp_pmuludq_Vx_Wx,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xf5 */  iemOp_pmaddwd_Pq_Qq,        iemOp_pmaddwd_Vx_Wx,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xf6 */  iemOp_psadbw_Pq_Qq,         iemOp_psadbw_Vx_Wx,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xf7 */  iemOp_maskmovq_Pq_Nq,       iemOp_maskmovdqu_Vdq_Udq,   iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xf8 */  iemOp_psubb_Pq_Qq,          iemOp_psubb_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xf9 */  iemOp_psubw_Pq_Qq,          iemOp_psubw_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xfa */  iemOp_psubd_Pq_Qq,          iemOp_psubd_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xfb */  iemOp_psubq_Pq_Qq,          iemOp_psubq_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xfc */  iemOp_paddb_Pq_Qq,          iemOp_paddb_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xfd */  iemOp_paddw_Pq_Qq,          iemOp_paddw_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xfe */  iemOp_paddd_Pq_Qq,          iemOp_paddd_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xff */  IEMOP_X4(iemOp_ud0),
};
/* 256 opcodes x 4 prefix variants. */
AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
11757/** @} */
11758
Note: See TracBrowser for help on using the repository browser.

© 2025 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette