VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructions.cpp.h@ 61020

Last change on this file since 61020 was 61020, checked in by vboxsync, 9 years ago

iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd: Try enable when no REM.

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 599.1 KB
Line 
1/* $Id: IEMAllInstructions.cpp.h 61020 2016-05-18 01:38:51Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 */
5
6/*
7 * Copyright (C) 2011-2015 Oracle Corporation
8 *
9 * This file is part of VirtualBox Open Source Edition (OSE), as
10 * available from http://www.virtualbox.org. This file is free software;
11 * you can redistribute it and/or modify it under the terms of the GNU
12 * General Public License (GPL) as published by the Free Software
13 * Foundation, in version 2 as it comes in the "COPYING" file of the
14 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
15 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
16 */
17
18
19/*******************************************************************************
20* Global Variables *
21*******************************************************************************/
22extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */
23
24
/**
 * Common worker for instructions like ADD, AND, OR, ++ with a byte
 * memory/register as the destination.
 *
 * Decodes the ModR/M byte and emits either the register form (mod == 3) or
 * the memory form.  The memory form maps the destination so a LOCK prefix
 * can be honoured, and commits EFLAGS only after the memory operand has
 * been committed.
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rm_r8, PCIEMOPBINSIZES, pImpl)
{
    /* The ModR/M byte follows the opcode. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX(); /* LOCK with a register destination is invalid. */

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,       pu8Dst,  0);
        IEM_MC_ARG(uint8_t,         u8Src,   1);
        IEM_MC_ARG(uint32_t *,      pEFlags, 2);

        /* Source = reg field (REX.R extended), destination = r/m field (REX.B extended). */
        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         * Note! We're putting the eflags on the stack here so we can commit them
         *       after the memory.
         */
        /* CMP and TEST have no locked worker and only read the destination. */
        uint32_t const fAccess = pImpl->pfnLockedU8 ? IEM_ACCESS_DATA_RW : IEM_ACCESS_DATA_R; /* CMP,TEST */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,  pu8Dst,           0);
        IEM_MC_ARG(uint8_t,    u8Src,            1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_FETCH_EFLAGS(EFlags);
        /* Dispatch to the locked worker when a LOCK prefix is present. */
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
85
86
/**
 * Common worker for word/dword/qword instructions like ADD, AND, OR, ++ with
 * memory/register as the destination.
 *
 * Decodes the ModR/M byte and switches on the effective operand size
 * (16/32/64-bit).  Register form handles mod == 3; the memory form maps the
 * destination so a LOCK prefix can be honoured and commits EFLAGS after the
 * memory operand.
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rm_rv, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX(); /* LOCK with a register destination is invalid. */

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst,  0);
                IEM_MC_ARG(uint16_t,   u16Src,   1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst,  0);
                IEM_MC_ARG(uint32_t,   u32Src,   1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                /* 32-bit writes zero the upper dword in 64-bit mode, but TEST
                   does not write its destination, so skip the clearing then. */
                if (pImpl != &g_iemAImpl_test)
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst,  0);
                IEM_MC_ARG(uint64_t,   u64Src,   1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);

                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're accessing memory.
         * Note! We're putting the eflags on the stack here so we can commit them
         *       after the memory.
         */
        /* NOTE(review): the check deliberately uses pfnLockedU8 in this sized
           worker -- presumably all locked entries are NULL together for
           CMP/TEST, making the U8 entry a valid proxy.  TODO confirm. */
        uint32_t const fAccess = pImpl->pfnLockedU8 ? IEM_ACCESS_DATA_RW : IEM_ACCESS_DATA_R /* CMP,TEST */;
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst,          0);
                IEM_MC_ARG(uint16_t,   u16Src,           1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst,          0);
                IEM_MC_ARG(uint32_t,   u32Src,           1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst,          0);
                IEM_MC_ARG(uint64_t,   u64Src,           1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
233
234
/**
 * Common worker for byte instructions like ADD, AND, OR, ++ with a register as
 * the destination.
 *
 * Since the destination is always a register, no LOCK prefix is allowed and
 * no memory mapping is needed: the memory form simply fetches the source
 * operand.
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_r8_rm, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0);
        IEM_MC_ARG(uint8_t,    u8Src,   1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        /* Operands mirrored vs. the rm_r8 worker: source is r/m, destination is reg. */
        IEM_MC_FETCH_GREG_U8(u8Src, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_GREG_U8(pu8Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
        IEM_MC_BEGIN(3, 1);
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0);
        IEM_MC_ARG(uint8_t,    u8Src,   1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_MEM_U8(u8Src, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U8(pu8Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
286
287
/**
 * Common worker for word/dword/qword instructions like ADD, AND, OR, ++ with a
 * register as the destination.
 *
 * Destination is always a register (reg field), source is register or memory
 * (r/m field); no LOCK prefix allowed and no memory mapping needed.
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rv_rm, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst,  0);
                IEM_MC_ARG(uint16_t,   u16Src,   1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);

                IEM_MC_FETCH_GREG_U16(u16Src, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst,  0);
                IEM_MC_ARG(uint32_t,   u32Src,   1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);

                IEM_MC_FETCH_GREG_U32(u32Src, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U32(pu32Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                /* 32-bit register writes zero the upper dword in 64-bit mode.
                   NOTE(review): unconditional here (no g_iemAImpl_test check as
                   in the rm_rv worker) -- presumably TEST is never routed
                   through this worker; confirm against the opcode map. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst,  0);
                IEM_MC_ARG(uint64_t,   u64Src,   1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);

                IEM_MC_FETCH_GREG_U64(u64Src, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst,  0);
                IEM_MC_ARG(uint16_t,   u16Src,   1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U16(u16Src, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst,  0);
                IEM_MC_ARG(uint32_t,   u32Src,   1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U32(u32Src, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U32(pu32Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                /* Zero the upper dword of the destination in 64-bit mode. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst,  0);
                IEM_MC_ARG(uint64_t,   u64Src,   1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U64(u64Src, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
415
416
/**
 * Common worker for instructions like ADD, AND, OR, ++ with working on AL with
 * a byte immediate.
 *
 * Fetches the immediate byte, then applies the operation to AL with EFLAGS
 * updated by the assembly worker.
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_AL_Ib, PCIEMOPBINSIZES, pImpl)
{
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
    IEM_MC_ARG_CONST(uint8_t,   u8Src,/*=*/ u8Imm,  1);
    IEM_MC_ARG(uint32_t *,      pEFlags,            2);

    IEM_MC_REF_GREG_U8(pu8Dst, X86_GREG_xAX); /* AL is the low byte of xAX. */
    IEM_MC_REF_EFLAGS(pEFlags);
    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
441
442
/**
 * Common worker for instructions like ADD, AND, OR, ++ with working on
 * AX/EAX/RAX with a word/dword immediate.
 *
 * The immediate size follows the effective operand size; in 64-bit mode the
 * immediate is a sign-extended dword (Iz), not a full qword.
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rAX_Iz, PCIEMOPBINSIZES, pImpl)
{
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint16_t *,      pu16Dst,                0);
            IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/ u16Imm,    1);
            IEM_MC_ARG(uint32_t *,      pEFlags,                2);

            IEM_MC_REF_GREG_U16(pu16Dst, X86_GREG_xAX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint32_t *,      pu32Dst,                0);
            IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/ u32Imm,    1);
            IEM_MC_ARG(uint32_t *,      pEFlags,                2);

            IEM_MC_REF_GREG_U32(pu32Dst, X86_GREG_xAX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

            /* 32-bit writes zero the upper dword in 64-bit mode, but TEST
               does not write its destination, so skip the clearing then. */
            if (pImpl != &g_iemAImpl_test)
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            /* Iz: dword immediate sign-extended to 64 bits. */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint64_t *,      pu64Dst,                0);
            IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/ u64Imm,    1);
            IEM_MC_ARG(uint32_t *,      pEFlags,                2);

            IEM_MC_REF_GREG_U64(pu64Dst, X86_GREG_xAX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
515
516
/** Opcodes 0xf1, 0xd6.
 * Shared handler for invalid opcodes: raises \#UD. */
FNIEMOP_DEF(iemOp_Invalid)
{
    IEMOP_MNEMONIC("Invalid");
    return IEMOP_RAISE_INVALID_OPCODE();
}
523
524
525
526/** @name ..... opcodes.
527 *
528 * @{
529 */
530
531/** @} */
532
533
534/** @name Two byte opcodes (first byte 0x0f).
535 *
536 * @{
537 */
538
/** Opcode 0x0f 0x00 /0.
 * SLDT - store the LDTR selector to a register (operand-size dependent) or
 * to a 16-bit memory location.  Requires 286+ and protected mode. */
FNIEMOP_DEF_1(iemOp_Grp6_sldt, uint8_t, bRm)
{
    IEMOP_MNEMONIC("sldt Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        /* Register form: the store width follows the effective operand size. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Ldtr);
                IEM_MC_FETCH_LDTR_U16(u16Ldtr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Ldtr);
                IEM_MC_FETCH_LDTR_U32(u32Ldtr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Ldtr);
                IEM_MC_FETCH_LDTR_U64(u64Ldtr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory form: always a 16-bit store regardless of operand size. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Ldtr);
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_LDTR_U16(u16Ldtr);
        IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Ldtr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
595
596
/** Opcode 0x0f 0x00 /1.
 * STR - store the task register selector to a register (operand-size
 * dependent) or to a 16-bit memory location.  Requires 286+ and protected
 * mode.  Mirrors the SLDT handler above. */
FNIEMOP_DEF_1(iemOp_Grp6_str, uint8_t, bRm)
{
    IEMOP_MNEMONIC("str Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Tr);
                IEM_MC_FETCH_TR_U16(u16Tr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Tr);
                IEM_MC_FETCH_TR_U32(u32Tr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tr);
                IEM_MC_FETCH_TR_U64(u64Tr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory form: always a 16-bit store regardless of operand size. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Tr);
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_TR_U16(u16Tr);
        IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Tr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
653
654
/** Opcode 0x0f 0x00 /2.
 * LLDT - load the LDTR from a 16-bit register or memory operand.  The heavy
 * lifting (privilege and descriptor checks) is done by iemCImpl_lldt. */
FNIEMOP_DEF_1(iemOp_Grp6_lldt, uint8_t, bRm)
{
    IEMOP_MNEMONIC("lldt Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
685
686
/** Opcode 0x0f 0x00 /3.
 * LTR - load the task register from a 16-bit register or memory operand.
 * Privilege and descriptor checks are done by iemCImpl_ltr. */
FNIEMOP_DEF_1(iemOp_Grp6_ltr, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ltr Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order (GP0 check vs. memory fetch) */
        IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
717
718
719/** Opcode 0x0f 0x00 /3. */
720FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
721{
722 IEMOP_HLP_MIN_286();
723 IEMOP_HLP_NO_REAL_OR_V86_MODE();
724
725 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
726 {
727 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
728 IEM_MC_BEGIN(2, 0);
729 IEM_MC_ARG(uint16_t, u16Sel, 0);
730 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
731 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
732 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
733 IEM_MC_END();
734 }
735 else
736 {
737 IEM_MC_BEGIN(2, 1);
738 IEM_MC_ARG(uint16_t, u16Sel, 0);
739 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
740 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
741 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
742 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
743 IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc);
744 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
745 IEM_MC_END();
746 }
747 return VINF_SUCCESS;
748}
749
750
/** Opcode 0x0f 0x00 /4.
 * VERR - verify a segment for reading; thin wrapper over the common VerX
 * worker with fWrite = false. */
FNIEMOP_DEF_1(iemOp_Grp6_verr, uint8_t, bRm)
{
    IEMOP_MNEMONIC("verr Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
}
758
759
760/** Opcode 0x0f 0x00 /5. */
761FNIEMOP_DEF_1(iemOp_Grp6_verw, uint8_t, bRm)
762{
763 IEMOP_MNEMONIC("verr Ew");
764 IEMOP_HLP_MIN_286();
765 return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
766}
767
768
769/** Opcode 0x0f 0x00. */
770FNIEMOP_DEF(iemOp_Grp6)
771{
772 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
773 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
774 {
775 case 0: return FNIEMOP_CALL_1(iemOp_Grp6_sldt, bRm);
776 case 1: return FNIEMOP_CALL_1(iemOp_Grp6_str, bRm);
777 case 2: return FNIEMOP_CALL_1(iemOp_Grp6_lldt, bRm);
778 case 3: return FNIEMOP_CALL_1(iemOp_Grp6_ltr, bRm);
779 case 4: return FNIEMOP_CALL_1(iemOp_Grp6_verr, bRm);
780 case 5: return FNIEMOP_CALL_1(iemOp_Grp6_verw, bRm);
781 case 6: return IEMOP_RAISE_INVALID_OPCODE();
782 case 7: return IEMOP_RAISE_INVALID_OPCODE();
783 IEM_NOT_REACHED_DEFAULT_CASE_RET();
784 }
785
786}
787
788
/** Opcode 0x0f 0x01 /0.
 * SGDT - store the GDTR to memory; the actual store is done by
 * iemCImpl_sgdt.  Forces 64-bit operand size in long mode. */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC("sgdt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg,     0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
805
806
/** Opcode 0x0f 0x01 /0, mod=3 (VMCALL).
 * VMX instruction, not implemented: logs a stub complaint and raises \#UD. */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
813
814
/** Opcode 0x0f 0x01 /0, mod=3 (VMLAUNCH).
 * VMX instruction, not implemented: logs a stub complaint and raises \#UD. */
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
821
822
/** Opcode 0x0f 0x01 /0, mod=3 (VMRESUME).
 * VMX instruction, not implemented: logs a stub complaint and raises \#UD. */
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
829
830
/** Opcode 0x0f 0x01 /0, mod=3 (VMXOFF).
 * VMX instruction, not implemented: logs a stub complaint and raises \#UD. */
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
837
838
/** Opcode 0x0f 0x01 /1.
 * SIDT - store the IDTR to memory; the actual store is done by
 * iemCImpl_sidt.  Mirrors the SGDT handler. */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC("sidt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg,     0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
855
856
/** Opcode 0x0f 0x01 /1, mod=3 (MONITOR).
 * Defers entirely to iemCImpl_monitor, passing the effective segment for the
 * RAX-based linear address. */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    IEMOP_MNEMONIC("monitor");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pIemCpu->iEffSeg);
}
864
865
/** Opcode 0x0f 0x01 /1, mod=3 (MWAIT).
 * Defers entirely to iemCImpl_mwait. */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    IEMOP_MNEMONIC("mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
}
873
874
/** Opcode 0x0f 0x01 /2.
 * LGDT - load the GDTR from memory; descriptor-table loading and privilege
 * checks are done by iemCImpl_lgdt.
 * NOTE(review): unlike sgdt/sidt this handler has no IEMOP_HLP_MIN_286()
 * check -- confirm whether that is intentional. */
FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC("lgdt");
    IEMOP_HLP_64BIT_OP_SIZE();  /* Operand size is forced to 64-bit in long mode. */
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg,                                                0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc,                                            1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pIemCpu->enmEffOpSize,       2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}
891
892
/** Opcode 0x0f 0x01 0xd0 (XGETBV).
 * Raises \#UD when the guest CPU profile lacks XSAVE/XRSTOR support;
 * otherwise defers to iemCImpl_xgetbv. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    IEMOP_MNEMONIC("xgetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xgetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}
904
905
/** Opcode 0x0f 0x01 0xd1 (XSETBV).
 * Raises \#UD when the guest CPU profile lacks XSAVE/XRSTOR support;
 * otherwise defers to iemCImpl_xsetbv (which does the CPL check). */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    IEMOP_MNEMONIC("xsetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xsetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}
917
918
919/** Opcode 0x0f 0x01 /3. */
920FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
921{
922 IEMMODE enmEffOpSize = pIemCpu->enmCpuMode == IEMMODE_64BIT
923 ? IEMMODE_64BIT
924 : pIemCpu->enmEffOpSize;
925 IEM_MC_BEGIN(3, 1);
926 IEM_MC_ARG(uint8_t, iEffSeg, 0);
927 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
928 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
929 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
930 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
931 IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
932 IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
933 IEM_MC_END();
934 return VINF_SUCCESS;
935}
936
937
/* AMD SVM instructions (0x0f 0x01, mod=3): decoded but not implemented;
   FNIEMOP_UD_STUB makes each raise #UD. */

/** Opcode 0x0f 0x01 0xd8 (VMRUN). */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);

/** Opcode 0x0f 0x01 0xd9 (VMMCALL). */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmmcall);

/** Opcode 0x0f 0x01 0xda (VMLOAD). */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);

/** Opcode 0x0f 0x01 0xdb (VMSAVE). */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);

/** Opcode 0x0f 0x01 0xdc (STGI). */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);

/** Opcode 0x0f 0x01 0xdd (CLGI). */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);

/** Opcode 0x0f 0x01 0xde (SKINIT). */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);

/** Opcode 0x0f 0x01 0xdf (INVLPGA). */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
961
962/** Opcode 0x0f 0x01 /4. */
963FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
964{
965 IEMOP_MNEMONIC("smsw");
966 IEMOP_HLP_MIN_286();
967 IEMOP_HLP_NO_LOCK_PREFIX();
968 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
969 {
970 switch (pIemCpu->enmEffOpSize)
971 {
972 case IEMMODE_16BIT:
973 IEM_MC_BEGIN(0, 1);
974 IEM_MC_LOCAL(uint16_t, u16Tmp);
975 IEM_MC_FETCH_CR0_U16(u16Tmp);
976 if (IEM_GET_TARGET_CPU(pIemCpu) > IEMTARGETCPU_386)
977 { /* likely */ }
978 else if (IEM_GET_TARGET_CPU(pIemCpu) >= IEMTARGETCPU_386)
979 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
980 else
981 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
982 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Tmp);
983 IEM_MC_ADVANCE_RIP();
984 IEM_MC_END();
985 return VINF_SUCCESS;
986
987 case IEMMODE_32BIT:
988 IEM_MC_BEGIN(0, 1);
989 IEM_MC_LOCAL(uint32_t, u32Tmp);
990 IEM_MC_FETCH_CR0_U32(u32Tmp);
991 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Tmp);
992 IEM_MC_ADVANCE_RIP();
993 IEM_MC_END();
994 return VINF_SUCCESS;
995
996 case IEMMODE_64BIT:
997 IEM_MC_BEGIN(0, 1);
998 IEM_MC_LOCAL(uint64_t, u64Tmp);
999 IEM_MC_FETCH_CR0_U64(u64Tmp);
1000 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Tmp);
1001 IEM_MC_ADVANCE_RIP();
1002 IEM_MC_END();
1003 return VINF_SUCCESS;
1004
1005 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1006 }
1007 }
1008 else
1009 {
1010 /* Ignore operand size here, memory refs are always 16-bit. */
1011 IEM_MC_BEGIN(0, 2);
1012 IEM_MC_LOCAL(uint16_t, u16Tmp);
1013 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
1014 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
1015 IEM_MC_FETCH_CR0_U16(u16Tmp);
1016 if (IEM_GET_TARGET_CPU(pIemCpu) > IEMTARGETCPU_386)
1017 { /* likely */ }
1018 else if (pIemCpu->uTargetCpu >= IEMTARGETCPU_386)
1019 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
1020 else
1021 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
1022 IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Tmp);
1023 IEM_MC_ADVANCE_RIP();
1024 IEM_MC_END();
1025 return VINF_SUCCESS;
1026 }
1027}
1028
1029
/** Opcode 0x0f 0x01 /6. */
FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
{
    /* The operand size is effectively ignored, all is 16-bit and only the
       lower 3-bits are used. */
    /* LMSW - load the machine status word; the actual CR0 update and
       permission checks are done by iemCImpl_lmsw. */
    IEMOP_MNEMONIC("lmsw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_LOCK_PREFIX();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register source. */
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    else
    {
        /* Memory source: always a 16-bit fetch regardless of operand size. */
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1058
1059
/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
{
    /* INVLPG - invalidate the TLB entry for the effective address; the
       work is deferred to iemCImpl_invlpg.  Only reached for the memory
       form (mod != 3); the register forms are dispatched to swapgs/rdtscp
       by iemOp_Grp7. */
    IEMOP_MNEMONIC("invlpg");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(1, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}
1073
1074
/** Opcode 0x0f 0x01 /7, register form r/m=0 (SWAPGS). */
FNIEMOP_DEF(iemOp_Grp7_swapgs)
{
    /* SWAPGS - 64-bit mode only; everything deferred to iemCImpl_swapgs. */
    IEMOP_MNEMONIC("swapgs");
    IEMOP_HLP_ONLY_64BIT();
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
}
1083
1084
/** Opcode 0x0f 0x01 /7, register form r/m=1 (RDTSCP). */
FNIEMOP_DEF(iemOp_Grp7_rdtscp)
{
    /* Not implemented yet - complains loudly and bails out. */
    NOREF(pIemCpu);
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}
1092
1093
/** Opcode 0x0f 0x01. */
FNIEMOP_DEF(iemOp_Grp7)
{
    /*
     * Group 7 dispatcher.  Routes on the ModR/M reg field; for several
     * encodings the register form (mod == 3) is further decoded via the
     * r/m field.
     */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            /* Memory: SGDT.  Register: VMX ops on r/m 1..4, rest invalid. */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_sgdt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 1:
            /* Memory: SIDT.  Register: MONITOR/MWAIT, rest invalid. */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_sidt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 2:
            /* Memory: LGDT.  Register: XGETBV/XSETBV, rest invalid. */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_lgdt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 3:
            /* Memory: LIDT.  Register: AMD SVM ops, all eight r/m values used. */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_lidt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
                case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
                case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
                case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }

        case 4:
            /* SMSW handles both register and memory forms itself. */
            return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);

        case 5:
            return IEMOP_RAISE_INVALID_OPCODE();

        case 6:
            /* LMSW handles both register and memory forms itself. */
            return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);

        case 7:
            /* Memory: INVLPG.  Register: SWAPGS/RDTSCP, rest invalid. */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_invlpg, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
1170
/**
 * Common worker for LAR (0x0f 0x02) and LSL (0x0f 0x03), Gv,Ew forms.
 * (Header previously said "Opcode 0x0f 0x00 /3", which did not match the
 * two callers below.)
 *
 * The selector is fetched as a 16-bit value and the actual access-rights /
 * limit logic plus EFLAGS.ZF update is done by the iemCImpl_LarLsl_u16/u64
 * C implementations, with fIsLar selecting between the two instructions.
 */
FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
{
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register source. */
        IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 3);

                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_CIMPL_4(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, pEFlags, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            /* 32-bit shares the 64-bit register reference and worker. */
            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 3);

                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_CIMPL_4(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, pEFlags, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory source: the 16-bit selector is fetched from the effective address. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);

                IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_CIMPL_4(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, pEFlags, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
/** @todo testcase: make sure it's a 16-bit read. */

                IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_CIMPL_4(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, pEFlags, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
1272
1273
1274
/** Opcode 0x0f 0x02. */
FNIEMOP_DEF(iemOp_lar_Gv_Ew)
{
    /* LAR - load access rights; shares the worker with LSL (fIsLar=true). */
    IEMOP_MNEMONIC("lar Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
}
1281
1282
/** Opcode 0x0f 0x03. */
FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
{
    /* LSL - load segment limit; shares the worker with LAR (fIsLar=false). */
    IEMOP_MNEMONIC("lsl Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
}
1289
1290
/** Opcode 0x0f 0x05. */
FNIEMOP_DEF(iemOp_syscall)
{
    /* SYSCALL - everything deferred to iemCImpl_syscall. */
    IEMOP_MNEMONIC("syscall"); /** @todo 286 LOADALL */
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
}
1298
1299
/** Opcode 0x0f 0x06. */
FNIEMOP_DEF(iemOp_clts)
{
    /* CLTS - clear CR0.TS; everything deferred to iemCImpl_clts. */
    IEMOP_MNEMONIC("clts");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
}
1307
1308
/** Opcode 0x0f 0x07. */
FNIEMOP_DEF(iemOp_sysret)
{
    /* SYSRET - everything deferred to iemCImpl_sysret. */
    IEMOP_MNEMONIC("sysret"); /** @todo 386 LOADALL */
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
}
1316
1317
/** Opcode 0x0f 0x08. */
FNIEMOP_STUB(iemOp_invd); /* not implemented yet */
// IEMOP_HLP_MIN_486();
1321
1322
/** Opcode 0x0f 0x09. */
FNIEMOP_DEF(iemOp_wbinvd)
{
    /* WBINVD - checks CPL and otherwise does nothing (cache write-back and
       invalidation is not modelled). */
    IEMOP_MNEMONIC("wbinvd");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS; /* ignore for now */
}
1335
1336
/** Opcode 0x0f 0x0b. */
FNIEMOP_DEF(iemOp_ud2)
{
    /* UD2 - architecturally guaranteed to raise an invalid opcode fault. */
    IEMOP_MNEMONIC("ud2");
    return IEMOP_RAISE_INVALID_OPCODE();
}
1343
/** Opcode 0x0f 0x0d. */
FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
{
    /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
    /* Invalid opcode unless the guest CPU profile advertises 3DNow!
       prefetch support. */
    if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->f3DNowPrefetch)
    {
        IEMOP_MNEMONIC("GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    /* Only the memory forms are valid. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC("GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    IEMOP_HLP_NO_LOCK_PREFIX();
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 2: /* Aliased to /0 for the time being. */
        case 4: /* Aliased to /0 for the time being. */
        case 5: /* Aliased to /0 for the time being. */
        case 6: /* Aliased to /0 for the time being. */
        case 7: /* Aliased to /0 for the time being. */
        case 0: IEMOP_MNEMONIC("prefetch"); break;
        case 1: IEMOP_MNEMONIC("prefetchw"); break;
        case 3: IEMOP_MNEMONIC("prefetchw"); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

    /* Decode the effective address but don't touch memory - prefetch hints
       are implemented as a NOP. */
    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    /* Currently a NOP. */
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
1383
1384
/** Opcode 0x0f 0x0e. */
FNIEMOP_STUB(iemOp_femms);


/*
 * 3DNow! instruction stubs (opcode 0x0f 0x0f with a trailing opcode byte).
 * None are implemented yet; they are dispatched from iemOp_3Dnow below.
 */

/** Opcode 0x0f 0x0f 0x0c. */
FNIEMOP_STUB(iemOp_3Dnow_pi2fw_Pq_Qq);

/** Opcode 0x0f 0x0f 0x0d. */
FNIEMOP_STUB(iemOp_3Dnow_pi2fd_Pq_Qq);

/** Opcode 0x0f 0x0f 0x1c. */
FNIEMOP_STUB(iemOp_3Dnow_pf2fw_Pq_Qq);

/** Opcode 0x0f 0x0f 0x1d. */
FNIEMOP_STUB(iemOp_3Dnow_pf2fd_Pq_Qq);

/** Opcode 0x0f 0x0f 0x8a. */
FNIEMOP_STUB(iemOp_3Dnow_pfnacc_Pq_Qq);

/** Opcode 0x0f 0x0f 0x8e. */
FNIEMOP_STUB(iemOp_3Dnow_pfpnacc_Pq_Qq);

/** Opcode 0x0f 0x0f 0x90. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpge_Pq_Qq);

/** Opcode 0x0f 0x0f 0x94. */
FNIEMOP_STUB(iemOp_3Dnow_pfmin_Pq_Qq);

/** Opcode 0x0f 0x0f 0x96. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcp_Pq_Qq);

/** Opcode 0x0f 0x0f 0x97. */
FNIEMOP_STUB(iemOp_3Dnow_pfrsqrt_Pq_Qq);

/** Opcode 0x0f 0x0f 0x9a. */
FNIEMOP_STUB(iemOp_3Dnow_pfsub_Pq_Qq);

/** Opcode 0x0f 0x0f 0x9e. */
FNIEMOP_STUB(iemOp_3Dnow_pfadd_PQ_Qq);

/** Opcode 0x0f 0x0f 0xa0. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpgt_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa4. */
FNIEMOP_STUB(iemOp_3Dnow_pfmax_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa6. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcpit1_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa7. */
FNIEMOP_STUB(iemOp_3Dnow_pfrsqit1_Pq_Qq);

/** Opcode 0x0f 0x0f 0xaa. */
FNIEMOP_STUB(iemOp_3Dnow_pfsubr_Pq_Qq);

/** Opcode 0x0f 0x0f 0xae. */
FNIEMOP_STUB(iemOp_3Dnow_pfacc_PQ_Qq);

/** Opcode 0x0f 0x0f 0xb0. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpeq_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb4. */
FNIEMOP_STUB(iemOp_3Dnow_pfmul_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb6. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcpit2_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb7. */
FNIEMOP_STUB(iemOp_3Dnow_pmulhrw_Pq_Qq);

/** Opcode 0x0f 0x0f 0xbb. */
FNIEMOP_STUB(iemOp_3Dnow_pswapd_Pq_Qq);

/** Opcode 0x0f 0x0f 0xbf. */
FNIEMOP_STUB(iemOp_3Dnow_pavgusb_PQ_Qq);
1460
1461
/** Opcode 0x0f 0x0f. */
FNIEMOP_DEF(iemOp_3Dnow)
{
    /*
     * 3DNow! escape: dispatch on the instruction's trailing opcode byte.
     *
     * NOTE(review): the byte is fetched here, before the ModR/M byte and
     * its displacement; in the actual 3DNow! encoding the opcode suffix
     * comes after the operands.  All targets are stubs at present so this
     * cannot misbehave yet, but confirm the fetch order before implementing
     * any of them.
     */
    if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->f3DNow)
    {
        IEMOP_MNEMONIC("3Dnow");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    /* This is pretty sparse, use switch instead of table. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    switch (b)
    {
        case 0x0c: return FNIEMOP_CALL(iemOp_3Dnow_pi2fw_Pq_Qq);
        case 0x0d: return FNIEMOP_CALL(iemOp_3Dnow_pi2fd_Pq_Qq);
        case 0x1c: return FNIEMOP_CALL(iemOp_3Dnow_pf2fw_Pq_Qq);
        case 0x1d: return FNIEMOP_CALL(iemOp_3Dnow_pf2fd_Pq_Qq);
        case 0x8a: return FNIEMOP_CALL(iemOp_3Dnow_pfnacc_Pq_Qq);
        case 0x8e: return FNIEMOP_CALL(iemOp_3Dnow_pfpnacc_Pq_Qq);
        case 0x90: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpge_Pq_Qq);
        case 0x94: return FNIEMOP_CALL(iemOp_3Dnow_pfmin_Pq_Qq);
        case 0x96: return FNIEMOP_CALL(iemOp_3Dnow_pfrcp_Pq_Qq);
        case 0x97: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqrt_Pq_Qq);
        case 0x9a: return FNIEMOP_CALL(iemOp_3Dnow_pfsub_Pq_Qq);
        case 0x9e: return FNIEMOP_CALL(iemOp_3Dnow_pfadd_PQ_Qq);
        case 0xa0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpgt_Pq_Qq);
        case 0xa4: return FNIEMOP_CALL(iemOp_3Dnow_pfmax_Pq_Qq);
        case 0xa6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit1_Pq_Qq);
        case 0xa7: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqit1_Pq_Qq);
        case 0xaa: return FNIEMOP_CALL(iemOp_3Dnow_pfsubr_Pq_Qq);
        case 0xae: return FNIEMOP_CALL(iemOp_3Dnow_pfacc_PQ_Qq);
        case 0xb0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpeq_Pq_Qq);
        case 0xb4: return FNIEMOP_CALL(iemOp_3Dnow_pfmul_Pq_Qq);
        case 0xb6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit2_Pq_Qq);
        case 0xb7: return FNIEMOP_CALL(iemOp_3Dnow_pmulhrw_Pq_Qq);
        case 0xbb: return FNIEMOP_CALL(iemOp_3Dnow_pswapd_Pq_Qq);
        case 0xbf: return FNIEMOP_CALL(iemOp_3Dnow_pavgusb_PQ_Qq);
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
1503
1504
/*
 * SSE/SSE2 move instruction stubs (opcodes 0x0f 0x10..0x17).
 * Not implemented yet; the //NEXT markers flag implementation candidates.
 */

/** Opcode 0x0f 0x10. */
FNIEMOP_STUB(iemOp_movups_Vps_Wps__movupd_Vpd_Wpd__movss_Vss_Wss__movsd_Vsd_Wsd);
/** Opcode 0x0f 0x11. */
FNIEMOP_STUB(iemOp_movups_Wps_Vps__movupd_Wpd_Vpd__movss_Wss_Vss__movsd_Vsd_Wsd);
/** Opcode 0x0f 0x12. */
FNIEMOP_STUB(iemOp_movlps_Vq_Mq__movhlps_Vq_Uq__movlpd_Vq_Mq__movsldup_Vq_Wq__movddup_Vq_Wq); //NEXT
/** Opcode 0x0f 0x13. */
FNIEMOP_STUB(iemOp_movlps_Mq_Vq__movlpd_Mq_Vq); //NEXT
/** Opcode 0x0f 0x14. */
FNIEMOP_STUB(iemOp_unpckhlps_Vps_Wq__unpcklpd_Vpd_Wq);
/** Opcode 0x0f 0x15. */
FNIEMOP_STUB(iemOp_unpckhps_Vps_Wq__unpckhpd_Vpd_Wq);
/** Opcode 0x0f 0x16. */
FNIEMOP_STUB(iemOp_movhps_Vq_Mq__movlhps_Vq_Uq__movhpd_Vq_Mq__movshdup_Vq_Wq); //NEXT
/** Opcode 0x0f 0x17. */
FNIEMOP_STUB(iemOp_movhps_Mq_Vq__movhpd_Mq_Vq); //NEXT
1521
1522
/** Opcode 0x0f 0x18. */
FNIEMOP_DEF(iemOp_prefetch_Grp16)
{
    /* Group 16 prefetch hints - only the memory forms are valid; the
       effective address is decoded but no memory is accessed (NOP). */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX();
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 4: /* Aliased to /0 for the time being according to AMD. */
            case 5: /* Aliased to /0 for the time being according to AMD. */
            case 6: /* Aliased to /0 for the time being according to AMD. */
            case 7: /* Aliased to /0 for the time being according to AMD. */
            case 0: IEMOP_MNEMONIC("prefetchNTA m8"); break;
            case 1: IEMOP_MNEMONIC("prefetchT0  m8"); break;
            case 2: IEMOP_MNEMONIC("prefetchT1  m8"); break;
            case 3: IEMOP_MNEMONIC("prefetchT2  m8"); break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }

        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        /* Currently a NOP. */
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    return IEMOP_RAISE_INVALID_OPCODE();
}
1554
1555
1556/** Opcode 0x0f 0x19..0x1f. */
1557FNIEMOP_DEF(iemOp_nop_Ev)
1558{
1559 IEMOP_HLP_NO_LOCK_PREFIX();
1560 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1561 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1562 {
1563 IEM_MC_BEGIN(0, 0);
1564 IEM_MC_ADVANCE_RIP();
1565 IEM_MC_END();
1566 }
1567 else
1568 {
1569 IEM_MC_BEGIN(0, 1);
1570 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1571 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1572 /* Currently a NOP. */
1573 IEM_MC_ADVANCE_RIP();
1574 IEM_MC_END();
1575 }
1576 return VINF_SUCCESS;
1577}
1578
1579
/** Opcode 0x0f 0x20. */
FNIEMOP_DEF(iemOp_mov_Rd_Cd)
{
    /* mod is ignored, as is operand size overrides. */
    /* MOV Rd,CRn - read a control register into a general register; the
       operand size is forced to the native width. */
    IEMOP_MNEMONIC("mov Rd,Cd");
    IEMOP_HLP_MIN_386();
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_64BIT;
    else
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg;
    if (pIemCpu->fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fMovCr8In32Bit)
            return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    /* Only CR0, CR2, CR3, CR4 and CR8 are valid sources. */
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_HLP_DONE_DECODING();

    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, (X86_MODRM_RM_MASK & bRm) | pIemCpu->uRexB, iCrReg);
}
1611
1612
/** Opcode 0x0f 0x21. */
FNIEMOP_DEF(iemOp_mov_Rd_Dd)
{
    /* MOV Rd,DRn - read a debug register into a general register.  A REX.R
       prefix (which would select DR8+) is invalid. */
    IEMOP_MNEMONIC("mov Rd,Dd");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* NOTE(review): uses IEMOP_HLP_NO_LOCK_PREFIX here while the 0x0f 0x23
       counterpart uses IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX - confirm
       which is intended. */
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REX_R)
        return IEMOP_RAISE_INVALID_OPCODE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
                                   (X86_MODRM_RM_MASK & bRm) | pIemCpu->uRexB,
                                   ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK));
}
1626
1627
/** Opcode 0x0f 0x22. */
FNIEMOP_DEF(iemOp_mov_Cd_Rd)
{
    /* mod is ignored, as is operand size overrides. */
    /* MOV CRn,Rd - write a general register into a control register; the
       operand size is forced to the native width. */
    IEMOP_MNEMONIC("mov Cd,Rd");
    IEMOP_HLP_MIN_386();
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_64BIT;
    else
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg;
    if (pIemCpu->fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fMovCr8In32Bit)
            return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    /* Only CR0, CR2, CR3, CR4 and CR8 are valid destinations. */
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_HLP_DONE_DECODING();

    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, (X86_MODRM_RM_MASK & bRm) | pIemCpu->uRexB);
}
1659
1660
/** Opcode 0x0f 0x23. */
FNIEMOP_DEF(iemOp_mov_Dd_Rd)
{
    /* MOV DRn,Rd - write a general register into a debug register.  A REX.R
       prefix (which would select DR8+) is invalid. */
    IEMOP_MNEMONIC("mov Dd,Rd");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REX_R)
        return IEMOP_RAISE_INVALID_OPCODE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
                                   ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK),
                                   (X86_MODRM_RM_MASK & bRm) | pIemCpu->uRexB);
}
1674
1675
/** Opcode 0x0f 0x24. */
FNIEMOP_DEF(iemOp_mov_Rd_Td)
{
    /* MOV Rd,TRn (test registers) - decoded as invalid opcode. */
    IEMOP_MNEMONIC("mov Rd,Td");
    /** @todo works on 386 and 486. */
    /* The RM byte is not considered, see testcase. */
    return IEMOP_RAISE_INVALID_OPCODE();
}
1684
1685
/** Opcode 0x0f 0x26. */
FNIEMOP_DEF(iemOp_mov_Td_Rd)
{
    /* MOV TRn,Rd (test registers) - decoded as invalid opcode. */
    IEMOP_MNEMONIC("mov Td,Rd");
    /** @todo works on 386 and 486. */
    /* The RM byte is not considered, see testcase. */
    return IEMOP_RAISE_INVALID_OPCODE();
}
1694
1695
/** Opcode 0x0f 0x28. */
FNIEMOP_DEF(iemOp_movaps_Vps_Wps__movapd_Vpd_Wpd)
{
    /* MOVAPS/MOVAPD Vps,Wps - load an XMM register from another XMM
       register or from an aligned 128-bit memory operand.  The 0x66
       operand-size prefix selects MOVAPD (SSE2), otherwise MOVAPS (SSE). */
    IEMOP_MNEMONIC(!(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP) ? "movaps r,mr" : "movapd r,mr");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        IEM_MC_BEGIN(0, 0);
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP))
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        else
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg,
                              (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ */
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP))
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        else
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();

        /* Aligned fetch - raises #GP on a misaligned address. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1741
1742
/** Opcode 0x0f 0x29. */
FNIEMOP_DEF(iemOp_movaps_Wps_Vps__movapd_Wpd_Vpd)
{
    /* MOVAPS/MOVAPD Wps,Vps - store an XMM register to another XMM register
       or to an aligned 128-bit memory operand.  The 0x66 operand-size
       prefix selects MOVAPD (SSE2), otherwise MOVAPS (SSE). */
    IEMOP_MNEMONIC(!(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP) ? "movaps mr,r" : "movapd mr,r");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        IEM_MC_BEGIN(0, 0);
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP))
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        else
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ */
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP))
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        else
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();

        /* Aligned store - raises #GP on a misaligned address. */
        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pIemCpu->iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1788
1789
/** Opcode 0x0f 0x2a - not implemented yet. */
FNIEMOP_STUB(iemOp_cvtpi2ps_Vps_Qpi__cvtpi2pd_Vpd_Qpi__cvtsi2ss_Vss_Ey__cvtsi2sd_Vsd_Ey); //NEXT
1792
1793
1794/** Opcode 0x0f 0x2b. */
1795#ifndef VBOX_WITH_REM /** @todo figure out why this causes moderate regressions when enabled... Enabled for non-REM to hopefully make some headway there... */
1796FNIEMOP_DEF(iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd)
1797{
1798 IEMOP_MNEMONIC(!(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP) ? "movntps mr,r" : "movntpd mr,r");
1799 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1800 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1801 {
1802 /*
1803 * Register, memory.
1804 */
1805 IEM_MC_BEGIN(0, 2);
1806 IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
1807 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1808
1809 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1810 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ */
1811 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP))
1812 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1813 else
1814 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1815
1816 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);
1817 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uSrc);
1818
1819 IEM_MC_ADVANCE_RIP();
1820 IEM_MC_END();
1821 }
1822 /* The register, register encoding is invalid. */
1823 else
1824 return IEMOP_RAISE_INVALID_OPCODE();
1825 return VINF_SUCCESS;
1826}
1827#else
1828FNIEMOP_STUB(iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd);
1829#endif
1830
1831
/* SSE/SSE2 conversion and compare stubs - not implemented yet. */

/** Opcode 0x0f 0x2c. */
FNIEMOP_STUB(iemOp_cvttps2pi_Ppi_Wps__cvttpd2pi_Ppi_Wpd__cvttss2si_Gy_Wss__cvttsd2si_Yu_Wsd); //NEXT
/** Opcode 0x0f 0x2d. */
FNIEMOP_STUB(iemOp_cvtps2pi_Ppi_Wps__cvtpd2pi_QpiWpd__cvtss2si_Gy_Wss__cvtsd2si_Gy_Wsd);
/** Opcode 0x0f 0x2e. */
FNIEMOP_STUB(iemOp_ucomiss_Vss_Wss__ucomisd_Vsd_Wsd); //NEXT
/** Opcode 0x0f 0x2f. */
FNIEMOP_STUB(iemOp_comiss_Vss_Wss__comisd_Vsd_Wsd);
1840
1841
/** Opcode 0x0f 0x30. */
FNIEMOP_DEF(iemOp_wrmsr)
{
    /* WRMSR - everything deferred to iemCImpl_wrmsr. */
    IEMOP_MNEMONIC("wrmsr");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
}
1849
1850
/** Opcode 0x0f 0x31. */
FNIEMOP_DEF(iemOp_rdtsc)
{
    /* RDTSC - everything deferred to iemCImpl_rdtsc. */
    IEMOP_MNEMONIC("rdtsc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
}
1858
1859
/** Opcode 0x0f 0x32. (Comment previously said 0x33; RDMSR is 0F 32.) */
FNIEMOP_DEF(iemOp_rdmsr)
{
    /* RDMSR - everything deferred to iemCImpl_rdmsr. */
    IEMOP_MNEMONIC("rdmsr");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
}
1867
1868
/** Opcode 0x0f 0x33. (Comment previously said 0x34, duplicating sysenter's.) */
FNIEMOP_STUB(iemOp_rdpmc);
/** Opcode 0x0f 0x34. */
FNIEMOP_STUB(iemOp_sysenter);
/** Opcode 0x0f 0x35. */
FNIEMOP_STUB(iemOp_sysexit);
/** Opcode 0x0f 0x37. */
FNIEMOP_STUB(iemOp_getsec);
/** Opcode 0x0f 0x38. */
FNIEMOP_UD_STUB(iemOp_3byte_Esc_A4); /* Here there be dragons... */
/** Opcode 0x0f 0x3a. */
FNIEMOP_UD_STUB(iemOp_3byte_Esc_A5); /* Here there be dragons... */
1881
1882
/**
 * Implements a conditional move.
 *
 * Wish there was an obvious way to do this where we could share and reduce
 * code bloat.
 *
 * @param a_Cnd The conditional "microcode" operation (an IEM_MC_IF_* macro
 *              invocation supplying the condition).
 *
 * Notes:
 *  - Register source: when the condition is false the 16- and 64-bit forms
 *    leave the destination untouched, while the 32-bit form still executes
 *    IEM_MC_CLEAR_HIGH_GREG_U64 on the destination - presumably to mimic
 *    the zero-extension a 32-bit GPR write performs in 64-bit mode (TODO:
 *    confirm against hardware behaviour).
 *  - Memory source: the operand is always fetched (so it may fault) before
 *    the condition is evaluated.
 */
#define CMOV_X(a_Cnd) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) \
    { \
        switch (pIemCpu->enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(0, 1); \
                IEM_MC_LOCAL(uint16_t, u16Tmp); \
                a_Cnd { \
                    IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB); \
                    IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(0, 1); \
                IEM_MC_LOCAL(uint32_t, u32Tmp); \
                a_Cnd { \
                    IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB); \
                    IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp); \
                } IEM_MC_ELSE() { \
                    IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(0, 1); \
                IEM_MC_LOCAL(uint64_t, u64Tmp); \
                a_Cnd { \
                    IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB); \
                    IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        switch (pIemCpu->enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(0, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
                IEM_MC_LOCAL(uint16_t, u16Tmp); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
                IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrEffSrc); \
                a_Cnd { \
                    IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(0, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
                IEM_MC_LOCAL(uint32_t, u32Tmp); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
                IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffSrc); \
                a_Cnd { \
                    IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp); \
                } IEM_MC_ELSE() { \
                    IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(0, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
                IEM_MC_LOCAL(uint64_t, u64Tmp); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
                IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffSrc); \
                a_Cnd { \
                    IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } do {} while (0)
1983
1984
1985
/** Opcode 0x0f 0x40. */
FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovo Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF)); /* move when OF is set */
}
1992
1993
/** Opcode 0x0f 0x41. */
FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovno Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF)); /* move when OF is clear */
}
2000
2001
/** Opcode 0x0f 0x42. */
FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovc Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF)); /* move when CF is set */
}
2008
2009
/** Opcode 0x0f 0x43. */
FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovnc Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF)); /* move when CF is clear */
}
2016
2017
/** Opcode 0x0f 0x44. */
FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
{
    IEMOP_MNEMONIC("cmove Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF)); /* move if ZF=1 */
}
2024
2025
/** Opcode 0x0f 0x45. */
FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovne Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)); /* move if ZF=0 */
}
2032
2033
/** Opcode 0x0f 0x46. */
FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovbe Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF)); /* move if CF=1 or ZF=1 */
}
2040
2041
/** Opcode 0x0f 0x47. */
FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovnbe Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF)); /* move if CF=0 and ZF=0 */
}
2048
2049
/** Opcode 0x0f 0x48. */
FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovs Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF)); /* move if SF=1 */
}
2056
2057
/** Opcode 0x0f 0x49. */
FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovns Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF)); /* move if SF=0 */
}
2064
2065
/** Opcode 0x0f 0x4a. */
FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovp Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF)); /* move if PF=1 */
}
2072
2073
/** Opcode 0x0f 0x4b. */
FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovnp Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF)); /* move if PF=0 */
}
2080
2081
/** Opcode 0x0f 0x4c. */
FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovl Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF)); /* move if SF != OF */
}
2088
2089
/** Opcode 0x0f 0x4d. */
FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovnl Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF)); /* move if SF == OF */
}
2096
2097
/** Opcode 0x0f 0x4e. */
FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovle Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF)); /* move if ZF=1 or SF != OF */
}
2104
2105
/** Opcode 0x0f 0x4f. */
FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovnle Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF)); /* move if ZF=0 and SF == OF */
}
2112
2113#undef CMOV_X
2114
/* SSE/SSE2 packed/scalar floating-point instructions 0x0f 0x50..0x5f are not
   implemented yet - declared as stubs below (//NEXT presumably marks the ones
   queued for implementation next). */
/** Opcode 0x0f 0x50. */
FNIEMOP_STUB(iemOp_movmskps_Gy_Ups__movmskpd_Gy_Upd);
/** Opcode 0x0f 0x51. */
FNIEMOP_STUB(iemOp_sqrtps_Wps_Vps__sqrtpd_Wpd_Vpd__sqrtss_Vss_Wss__sqrtsd_Vsd_Wsd);
/** Opcode 0x0f 0x52. */
FNIEMOP_STUB(iemOp_rsqrtps_Wps_Vps__rsqrtss_Vss_Wss);
/** Opcode 0x0f 0x53. */
FNIEMOP_STUB(iemOp_rcpps_Wps_Vps__rcpss_Vs_Wss);
/** Opcode 0x0f 0x54. */
FNIEMOP_STUB(iemOp_andps_Vps_Wps__andpd_Wpd_Vpd);
/** Opcode 0x0f 0x55. */
FNIEMOP_STUB(iemOp_andnps_Vps_Wps__andnpd_Wpd_Vpd);
/** Opcode 0x0f 0x56. */
FNIEMOP_STUB(iemOp_orps_Wpd_Vpd__orpd_Wpd_Vpd);
/** Opcode 0x0f 0x57. */
FNIEMOP_STUB(iemOp_xorps_Vps_Wps__xorpd_Wpd_Vpd);
/** Opcode 0x0f 0x58. */
FNIEMOP_STUB(iemOp_addps_Vps_Wps__addpd_Vpd_Wpd__addss_Vss_Wss__addsd_Vsd_Wsd); //NEXT
/** Opcode 0x0f 0x59. */
FNIEMOP_STUB(iemOp_mulps_Vps_Wps__mulpd_Vpd_Wpd__mulss_Vss__Wss__mulsd_Vsd_Wsd);//NEXT
/** Opcode 0x0f 0x5a. */
FNIEMOP_STUB(iemOp_cvtps2pd_Vpd_Wps__cvtpd2ps_Vps_Wpd__cvtss2sd_Vsd_Wss__cvtsd2ss_Vss_Wsd);
/** Opcode 0x0f 0x5b. */
FNIEMOP_STUB(iemOp_cvtdq2ps_Vps_Wdq__cvtps2dq_Vdq_Wps__cvtps2dq_Vdq_Wps);
/** Opcode 0x0f 0x5c. */
FNIEMOP_STUB(iemOp_subps_Vps_Wps__subpd_Vps_Wdp__subss_Vss_Wss__subsd_Vsd_Wsd);
/** Opcode 0x0f 0x5d. */
FNIEMOP_STUB(iemOp_minps_Vps_Wps__minpd_Vpd_Wpd__minss_Vss_Wss__minsd_Vsd_Wsd);
/** Opcode 0x0f 0x5e. */
FNIEMOP_STUB(iemOp_divps_Vps_Wps__divpd_Vpd_Wpd__divss_Vss_Wss__divsd_Vsd_Wsd);
/** Opcode 0x0f 0x5f. */
FNIEMOP_STUB(iemOp_maxps_Vps_Wps__maxpd_Vpd_Wpd__maxss_Vss_Wss__maxsd_Vsd_Wsd);
2147
2148
/**
 * Common worker for SSE2 and MMX instructions on the forms:
 *      pxxxx xmm1, xmm2/mem128
 *      pxxxx mm1, mm2/mem32
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 32-bit memory access for MMX and a 128-bit aligned 64-bit or 128-bit
 * memory access for SSE.
 *
 * Exceptions type 4.
 *
 * @param   pImpl       Pointer to the U128 (SSE) / U64 (MMX) implementation
 *                      pair; pfnU64 may be NULL when there is no MMX form.
 */
FNIEMOP_DEF_1(iemOpCommonMmxSse_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* The 0x66/0xf2/0xf3 prefixes select between the SSE and MMX encodings. */
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE */
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(2, 0);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_ARG(uint64_t const *, pSrc, 1);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                /* Only the low qword of the source XMM register is used. */
                IEM_MC_REF_XREG_U64_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_LOCAL(uint64_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                /* 64-bit read, but with the full 128-bit alignment requirement. */
                IEM_MC_FETCH_MEM_U64_ALIGN_U128(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);

                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            if (!pImpl->pfnU64)
                return IEMOP_RAISE_INVALID_OPCODE();
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
                /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(2, 0);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_ARG(uint32_t const *, pSrc, 1);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                /* Only the low dword of the source MMX register is used. */
                IEM_MC_REF_MREG_U32_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
                IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_LOCAL(uint32_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint32_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_FETCH_MEM_U32(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);

                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
2255
2256
/** Opcode 0x0f 0x60. */
FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd__punpcklbw_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpcklbw");
    /* Interleave the low-half bytes of dst and src. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_LowLow_To_Full, &g_iemAImpl_punpcklbw);
}
2263
2264
/** Opcode 0x0f 0x61. */
FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd__punpcklwd_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpcklwd"); /** @todo AMD mark the MMX version as 3DNow!. Intel says MMX CPUID req. */
    /* Interleave the low-half words of dst and src. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_LowLow_To_Full, &g_iemAImpl_punpcklwd);
}
2271
2272
/** Opcode 0x0f 0x62. */
FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd__punpckldq_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpckldq");
    /* Interleave the low-half dwords of dst and src. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_LowLow_To_Full, &g_iemAImpl_punpckldq);
}
2279
2280
/* Pack and compare instructions 0x0f 0x63..0x67 - not implemented yet. */
/** Opcode 0x0f 0x63. */
FNIEMOP_STUB(iemOp_packsswb_Pq_Qq__packsswb_Vdq_Wdq);
/** Opcode 0x0f 0x64. */
FNIEMOP_STUB(iemOp_pcmpgtb_Pq_Qq__pcmpgtb_Vdq_Wdq);
/** Opcode 0x0f 0x65. */
FNIEMOP_STUB(iemOp_pcmpgtw_Pq_Qq__pcmpgtw_Vdq_Wdq);
/** Opcode 0x0f 0x66. */
FNIEMOP_STUB(iemOp_pcmpgtd_Pq_Qq__pcmpgtd_Vdq_Wdq);
/** Opcode 0x0f 0x67. */
FNIEMOP_STUB(iemOp_packuswb_Pq_Qq__packuswb_Vdq_Wdq);
2291
2292
/**
 * Common worker for SSE2 and MMX instructions on the forms:
 *      pxxxx xmm1, xmm2/mem128
 *      pxxxx mm1, mm2/mem64
 *
 * The 2nd operand is the second half of a register, which in the memory case
 * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
 * where it may read the full 128 bits or only the upper 64 bits.
 *
 * Exceptions type 4.
 *
 * @param   pImpl       Pointer to the U128 (SSE) / U64 (MMX) implementation
 *                      pair; pfnU64 may be NULL when there is no MMX form.
 */
FNIEMOP_DEF_1(iemOpCommonMmxSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* The 0x66/0xf2/0xf3 prefixes select between the SSE and MMX encodings. */
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE */
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(2, 0);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_ARG(uint128_t const *, pSrc, 1);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_LOCAL(uint128_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc); /* Most CPUs probably only read the high qword. */

                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            if (!pImpl->pfnU64)
                return IEMOP_RAISE_INVALID_OPCODE();
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
                /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(2, 0);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_ARG(uint64_t const *, pSrc, 1);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
                IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_LOCAL(uint64_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_FETCH_MEM_U64(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);

                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
2399
2400
/** Opcode 0x0f 0x68. */
FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qq__punpckhbw_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpckhbw");
    /* Interleave the high-half bytes of dst and src. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
}
2407
2408
/** Opcode 0x0f 0x69. */
FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qd__punpckhwd_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpckhwd");
    /* Interleave the high-half words of dst and src. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
}
2415
2416
/** Opcode 0x0f 0x6a. */
FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qd__punpckhdq_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpckhdq");
    /* Interleave the high-half dwords of dst and src. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
}
2423
/** Opcode 0x0f 0x6b. Not implemented yet. */
FNIEMOP_STUB(iemOp_packssdw_Pq_Qd__packssdq_Vdq_Wdq);
2426
2427
/** Opcode 0x0f 0x6c. SSE2 only (no MMX form). */
FNIEMOP_DEF(iemOp_punpcklqdq_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpcklqdq");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_LowLow_To_Full, &g_iemAImpl_punpcklqdq);
}
2434
2435
/** Opcode 0x0f 0x6d. SSE2 only (no MMX form). */
FNIEMOP_DEF(iemOp_punpckhqdq_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpckhqdq");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhqdq);
}
2442
2443
/**
 * Opcode 0x0f 0x6e - movd/movq from a general register or memory into an
 * XMM (0x66 prefix) or MMX (no prefix) register.  REX.W selects the 64-bit
 * (movq) form, otherwise the 32-bit (movd) form; the destination register is
 * zero-extended in both cases.
 */
FNIEMOP_DEF(iemOp_movd_q_Pd_Ey__movd_q_Vy_Ey)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE */
            IEMOP_MNEMONIC("movd/q Wd/q,Ed/q");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* XMM, greg */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                    IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                    IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* XMM, [mem] */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                    IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                    IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            IEMOP_MNEMONIC("movq/d Pd/q,Ed/q");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* MMX, greg */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                    IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                else
                    IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* MMX, [mem] */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                    IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                    IEM_MC_STORE_MREG_U32_ZX_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
2544
2545
/**
 * Opcode 0x0f 0x6f - movq (MMX), movdqa (0x66 prefix, aligned) or movdqu
 * (0xf3 prefix, unaligned) register/memory load.
 */
FNIEMOP_DEF(iemOp_movq_Pq_Qq__movdqa_Vdq_Wdq__movdqu_Vdq_Wdq)
{
    bool fAligned = false;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE aligned */
            fAligned = true;
            /* fall thru - the two SSE forms differ only in the alignment check. */
        case IEM_OP_PRF_REPZ: /* SSE unaligned */
            if (fAligned)
                IEMOP_MNEMONIC("movdqa Vdq,Wdq");
            else
                IEMOP_MNEMONIC("movdqu Vdq,Wdq");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint128_t, u128Tmp);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_FETCH_XREG_U128(u128Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u128Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint128_t, u128Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                if (fAligned)
                    IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                else
                    IEM_MC_FETCH_MEM_U128(u128Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u128Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            IEMOP_MNEMONIC("movq Pq,Qq");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
                /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_FETCH_MREG_U64(u64Tmp, bRm & X86_MODRM_RM_MASK);
                IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
2639
2640
/**
 * Opcode 0x0f 0x70 - pshufw (MMX ext), pshufd (0x66), pshuflw (0xf2) and
 * pshufhw (0xf3).  The immediate here is evil: the imm8 follows the
 * ModRM/displacement bytes, so in the memory forms it must be fetched
 * after the effective address has been calculated.
 */
FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib__pshufd_Vdq_Wdq_Ib__pshufhw_Vdq_Wdq_Ib__pshuflq_Vdq_Wdq_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE */
        case IEM_OP_PRF_REPNZ: /* SSE */
        case IEM_OP_PRF_REPZ: /* SSE */
        {
            /* All three SSE forms share the decode structure; pick the worker. */
            PFNIEMAIMPLMEDIAPSHUF pfnAImpl;
            switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
            {
                case IEM_OP_PRF_SIZE_OP:
                    IEMOP_MNEMONIC("pshufd Vdq,Wdq,Ib");
                    pfnAImpl = iemAImpl_pshufd;
                    break;
                case IEM_OP_PRF_REPNZ:
                    IEMOP_MNEMONIC("pshuflw Vdq,Wdq,Ib");
                    pfnAImpl = iemAImpl_pshuflw;
                    break;
                case IEM_OP_PRF_REPZ:
                    IEMOP_MNEMONIC("pshufhw Vdq,Wdq,Ib");
                    pfnAImpl = iemAImpl_pshufhw;
                    break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_ARG(uint128_t const *, pSrc, 1);
                IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_CALL_SSE_AIMPL_3(pfnAImpl, pDst, pSrc, bEvilArg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_LOCAL(uint128_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                /* The imm8 comes after the addressing bytes - fetch it only now. */
                uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
                IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();

                IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_SSE_AIMPL_3(pfnAImpl, pDst, pSrc, bEvilArg);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }

        case 0: /* MMX Extension */
            IEMOP_MNEMONIC("pshufw Pq,Qq,Ib");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_ARG(uint64_t const *, pSrc, 1);
                IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
                IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_LOCAL(uint64_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                /* The imm8 comes after the addressing bytes - fetch it only now. */
                uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
                IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();

                IEM_MC_FETCH_MEM_U64(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
2765
2766
/* Group 12 (0x0f 0x71) word-shift-by-immediate workers - not implemented yet. */
/** Opcode 0x0f 0x71 11/2. */
FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x71 11/2. */
FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Udq_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x71 11/4. */
FNIEMOP_STUB_1(iemOp_Grp12_psraw_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x71 11/4. */
FNIEMOP_STUB_1(iemOp_Grp12_psraw_Udq_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x71 11/6. */
FNIEMOP_STUB_1(iemOp_Grp12_psllw_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x71 11/6. */
FNIEMOP_STUB_1(iemOp_Grp12_psllw_Udq_Ib, uint8_t, bRm);
2784
2785
/**
 * Opcode 0x0f 0x71 (group 12) - word shifts by immediate.  Only the
 * register form (mod=3) is valid; /2=psrlw, /4=psraw, /6=psllw, each
 * in an MMX (no prefix) and an SSE (0x66 prefix) variant.
 */
FNIEMOP_DEF(iemOp_Grp12)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE();
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: case 1: case 3: case 5: case 7:
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2: /* psrlw */
            switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psrlw_Nq_Ib, bRm);
                case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psrlw_Udq_Ib, bRm);
                default: return IEMOP_RAISE_INVALID_OPCODE();
            }
        case 4: /* psraw */
            switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psraw_Nq_Ib, bRm);
                case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psraw_Udq_Ib, bRm);
                default: return IEMOP_RAISE_INVALID_OPCODE();
            }
        case 6: /* psllw */
            switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psllw_Nq_Ib, bRm);
                case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psllw_Udq_Ib, bRm);
                default: return IEMOP_RAISE_INVALID_OPCODE();
            }
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2820
2821
/* Group 13 (0x0f 0x72) dword-shift-by-immediate workers - not implemented yet. */
/** Opcode 0x0f 0x72 11/2. */
FNIEMOP_STUB_1(iemOp_Grp13_psrld_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x72 11/2. */
FNIEMOP_STUB_1(iemOp_Grp13_psrld_Udq_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x72 11/4. */
FNIEMOP_STUB_1(iemOp_Grp13_psrad_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x72 11/4. */
FNIEMOP_STUB_1(iemOp_Grp13_psrad_Udq_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x72 11/6. */
FNIEMOP_STUB_1(iemOp_Grp13_pslld_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x72 11/6. */
FNIEMOP_STUB_1(iemOp_Grp13_pslld_Udq_Ib, uint8_t, bRm);
2839
2840
/**
 * Opcode 0x0f 0x72 (group 13) - dword shifts by immediate.  Only the
 * register form (mod=3) is valid; /2=psrld, /4=psrad, /6=pslld, each
 * in an MMX (no prefix) and an SSE (0x66 prefix) variant.
 */
FNIEMOP_DEF(iemOp_Grp13)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE();
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: case 1: case 3: case 5: case 7:
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2: /* psrld */
            switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case 0: return FNIEMOP_CALL_1(iemOp_Grp13_psrld_Nq_Ib, bRm);
                case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_psrld_Udq_Ib, bRm);
                default: return IEMOP_RAISE_INVALID_OPCODE();
            }
        case 4: /* psrad */
            switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case 0: return FNIEMOP_CALL_1(iemOp_Grp13_psrad_Nq_Ib, bRm);
                case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_psrad_Udq_Ib, bRm);
                default: return IEMOP_RAISE_INVALID_OPCODE();
            }
        case 6: /* pslld */
            switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case 0: return FNIEMOP_CALL_1(iemOp_Grp13_pslld_Nq_Ib, bRm);
                case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_pslld_Udq_Ib, bRm);
                default: return IEMOP_RAISE_INVALID_OPCODE();
            }
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2875
2876
/* Group 14 (0x0f 0x73) qword/dqword-shift-by-immediate workers - not
   implemented yet.  The psrldq/pslldq byte-shift forms exist only with
   the 0x66 prefix. */
/** Opcode 0x0f 0x73 11/2. */
FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x73 11/2. */
FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Udq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x73 11/3. */
FNIEMOP_STUB_1(iemOp_Grp14_psrldq_Udq_Ib, uint8_t, bRm); //NEXT

/** Opcode 0x0f 0x73 11/6. */
FNIEMOP_STUB_1(iemOp_Grp14_psllq_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x73 11/6. */
FNIEMOP_STUB_1(iemOp_Grp14_psllq_Udq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x73 11/7. */
FNIEMOP_STUB_1(iemOp_Grp14_pslldq_Udq_Ib, uint8_t, bRm); //NEXT
2894
2895
/**
 * Opcode 0x0f 0x73 (group 14) - qword and double-qword shifts by immediate.
 * Only the register form (mod=3) is valid; /2=psrlq, /6=psllq (MMX and SSE),
 * while /3=psrldq and /7=pslldq are SSE-only (0x66 prefix required).
 */
FNIEMOP_DEF(iemOp_Grp14)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE();
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: case 1: case 4: case 5:
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2: /* psrlq */
            switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case 0: return FNIEMOP_CALL_1(iemOp_Grp14_psrlq_Nq_Ib, bRm);
                case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psrlq_Udq_Ib, bRm);
                default: return IEMOP_RAISE_INVALID_OPCODE();
            }
        case 3: /* psrldq - SSE only */
            switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psrldq_Udq_Ib, bRm);
                default: return IEMOP_RAISE_INVALID_OPCODE();
            }
        case 6: /* psllq */
            switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case 0: return FNIEMOP_CALL_1(iemOp_Grp14_psllq_Nq_Ib, bRm);
                case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psllq_Udq_Ib, bRm);
                default: return IEMOP_RAISE_INVALID_OPCODE();
            }
        case 7: /* pslldq - SSE only */
            switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_pslldq_Udq_Ib, bRm);
                default: return IEMOP_RAISE_INVALID_OPCODE();
            }
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2935
2936
/**
 * Common worker for SSE2 and MMX instructions on the forms:
 *      pxxx    mm1,  mm2/mem64
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SSE2 and MMX cpuid checks.
 *
 * @param   pImpl   Function table entry supplying the MMX (pfnU64) and
 *                  SSE2 (pfnU128) assembly worker implementations.
 */
FNIEMOP_DEF_1(iemOpCommonMmxSse2_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* 0x66 selects the SSE2 (XMM) form, no prefix the MMX form; any REPZ/REPNZ
       combination is an invalid encoding for these instructions. */
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE */
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(2, 0);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_ARG(uint128_t const *, pSrc, 1);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_LOCAL(uint128_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                /* 128-bit operand must be 16-byte aligned (#GP otherwise). */
                IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);

                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
                /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(2, 0);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_ARG(uint64_t const *, pSrc, 1);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
                IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_LOCAL(uint64_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                /* No alignment restriction on the 64-bit MMX memory operand. */
                IEM_MC_FETCH_MEM_U64(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);

                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
3038
3039
/** Opcode 0x0f 0x74 - pcmpeqb: packed compare-equal of bytes (MMX/SSE2). */
FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq__pcmpeqb_Vdq_Wdq)
{
    IEMOP_MNEMONIC("pcmpeqb");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
}
3046
3047
/** Opcode 0x0f 0x75 - pcmpeqw: packed compare-equal of words (MMX/SSE2). */
FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq__pcmpeqw_Vdq_Wdq)
{
    IEMOP_MNEMONIC("pcmpeqw");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
}
3054
3055
/** Opcode 0x0f 0x76 - pcmpeqd: packed compare-equal of dwords (MMX/SSE2).
 * @note The function name has a typo ('pcmped' instead of 'pcmpeqd'); it is
 *       referenced from the opcode dispatch table, so renaming would have to
 *       be done in both places at once. */
FNIEMOP_DEF(iemOp_pcmped_Pq_Qq__pcmpeqd_Vdq_Wdq)
{
    IEMOP_MNEMONIC("pcmpeqd");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
}
3062
3063
/** Opcode 0x0f 0x77 - emms (not yet implemented). */
FNIEMOP_STUB(iemOp_emms);
/** Opcode 0x0f 0x78 - vmread / AMD group 17; decoded as invalid opcode. */
FNIEMOP_UD_STUB(iemOp_vmread_AmdGrp17);
/** Opcode 0x0f 0x79 - vmwrite; decoded as invalid opcode. */
FNIEMOP_UD_STUB(iemOp_vmwrite);
/** Opcode 0x0f 0x7c - haddpd/haddps (not yet implemented). */
FNIEMOP_STUB(iemOp_haddpd_Vdp_Wpd__haddps_Vps_Wps);
/** Opcode 0x0f 0x7d - hsubpd/hsubps (not yet implemented). */
FNIEMOP_STUB(iemOp_hsubpd_Vpd_Wpd__hsubps_Vps_Wps);
3074
3075
/** Opcode 0x0f 0x7e - movd/movq Ey,Pd/Vy: store low dword/qword of an MMX or
 *  XMM register to a general register or memory.  REX.W selects the 64-bit
 *  (movq) form, otherwise the low 32 bits (movd) are stored. */
FNIEMOP_DEF(iemOp_movd_q_Ey_Pd__movd_q_Ey_Vy__movq_Vq_Wq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* 0x66 selects the XMM source form, no prefix the MMX source form. */
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE */
            IEMOP_MNEMONIC("movd/q Ed/q,Wd/q");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* greg, XMM */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                    IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                    IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* [mem], XMM */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                    IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffSrc, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                    IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrEffSrc, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            IEMOP_MNEMONIC("movq/d Ed/q,Pd/q");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* greg, MMX */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                    IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                    IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* [mem], MMX */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                    IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffSrc, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                    IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrEffSrc, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
3182
3183
/** Opcode 0x0f 0x7f - movq Qq,Pq (MMX) / movdqa Wdq,Vdq (0x66) /
 *  movdqu Wdq,Vdq (0xf3): store an MMX or XMM register to register/memory.
 *  The 0x66 form enforces 16-byte alignment of the memory operand, the
 *  0xf3 form does not. */
FNIEMOP_DEF(iemOp_movq_Qq_Pq__movq_movdqa_Wdq_Vdq__movdqu_Wdq_Vdq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    bool fAligned = false;
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE aligned */
            fAligned = true;
            /* fall thru - aligned and unaligned forms share the code below */
        case IEM_OP_PRF_REPZ: /* SSE unaligned */
            if (fAligned)
                IEMOP_MNEMONIC("movdqa Wdq,Vdq");
            else
                IEMOP_MNEMONIC("movdqu Wdq,Vdq");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint128_t, u128Tmp);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_XREG_U128((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u128Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint128_t, u128Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                if (fAligned)
                    IEM_MC_STORE_MEM_U128_ALIGN_SSE(pIemCpu->iEffSeg, GCPtrEffSrc, u128Tmp);
                else
                    IEM_MC_STORE_MEM_U128(pIemCpu->iEffSeg, GCPtrEffSrc, u128Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            IEMOP_MNEMONIC("movq Qq,Pq");

            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
                /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_STORE_MREG_U64(bRm & X86_MODRM_RM_MASK, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffSrc, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
3278
3279
3280
/** Opcode 0x0f 0x80 - jo Jv: jump near if overflow (OF=1).
 *  Takes a rel16 or rel32 displacement depending on effective operand size. */
FNIEMOP_DEF(iemOp_jo_Jv)
{
    IEMOP_MNEMONIC("jo Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: rel16 displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: rel32 displacement (sign extended in long mode). */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3315
3316
/** Opcode 0x0f 0x81 - jno Jv: jump near if not overflow (OF=0). */
FNIEMOP_DEF(iemOp_jno_Jv)
{
    IEMOP_MNEMONIC("jno Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        /* Inverted condition: fall thru when OF is set, jump otherwise. */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3351
3352
/** Opcode 0x0f 0x82 - jc/jb/jnae Jv: jump near if carry (CF=1). */
FNIEMOP_DEF(iemOp_jc_Jv)
{
    IEMOP_MNEMONIC("jc/jb/jnae Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3387
3388
/** Opcode 0x0f 0x83 - jnc/jnb/jae Jv: jump near if not carry (CF=0). */
FNIEMOP_DEF(iemOp_jnc_Jv)
{
    IEMOP_MNEMONIC("jnc/jnb/jae Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        /* Inverted condition: fall thru when CF is set, jump otherwise. */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3423
3424
/** Opcode 0x0f 0x84 - je/jz Jv: jump near if equal/zero (ZF=1). */
FNIEMOP_DEF(iemOp_je_Jv)
{
    IEMOP_MNEMONIC("je/jz Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3459
3460
/** Opcode 0x0f 0x85 - jne/jnz Jv: jump near if not equal/not zero (ZF=0). */
FNIEMOP_DEF(iemOp_jne_Jv)
{
    IEMOP_MNEMONIC("jne/jnz Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        /* Inverted condition: fall thru when ZF is set, jump otherwise. */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3495
3496
/** Opcode 0x0f 0x86 - jbe/jna Jv: jump near if below or equal (CF=1 or ZF=1). */
FNIEMOP_DEF(iemOp_jbe_Jv)
{
    IEMOP_MNEMONIC("jbe/jna Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3531
3532
/** Opcode 0x0f 0x87 - jnbe/ja Jv: jump near if above (CF=0 and ZF=0). */
FNIEMOP_DEF(iemOp_jnbe_Jv)
{
    IEMOP_MNEMONIC("jnbe/ja Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        /* Inverted condition: fall thru when CF or ZF is set, jump otherwise. */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3567
3568
/** Opcode 0x0f 0x88 - js Jv: jump near if sign (SF=1). */
FNIEMOP_DEF(iemOp_js_Jv)
{
    IEMOP_MNEMONIC("js Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3603
3604
/** Opcode 0x0f 0x89 - jns Jv: jump near if not sign (SF=0). */
FNIEMOP_DEF(iemOp_jns_Jv)
{
    IEMOP_MNEMONIC("jns Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        /* Inverted condition: fall thru when SF is set, jump otherwise. */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3639
3640
/** Opcode 0x0f 0x8a - jp/jpe Jv: jump near if parity even (PF=1). */
FNIEMOP_DEF(iemOp_jp_Jv)
{
    IEMOP_MNEMONIC("jp Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3675
3676
3677/** Opcode 0x0f 0x8b. */
3678FNIEMOP_DEF(iemOp_jnp_Jv)
3679{
3680 IEMOP_MNEMONIC("jo Jv");
3681 IEMOP_HLP_MIN_386();
3682 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3683 if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
3684 {
3685 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3686 IEMOP_HLP_NO_LOCK_PREFIX();
3687
3688 IEM_MC_BEGIN(0, 0);
3689 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3690 IEM_MC_ADVANCE_RIP();
3691 } IEM_MC_ELSE() {
3692 IEM_MC_REL_JMP_S16(i16Imm);
3693 } IEM_MC_ENDIF();
3694 IEM_MC_END();
3695 }
3696 else
3697 {
3698 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3699 IEMOP_HLP_NO_LOCK_PREFIX();
3700
3701 IEM_MC_BEGIN(0, 0);
3702 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3703 IEM_MC_ADVANCE_RIP();
3704 } IEM_MC_ELSE() {
3705 IEM_MC_REL_JMP_S32(i32Imm);
3706 } IEM_MC_ENDIF();
3707 IEM_MC_END();
3708 }
3709 return VINF_SUCCESS;
3710}
3711
3712
/** Opcode 0x0f 0x8c - jl/jnge Jv: jump near if less (signed, SF != OF). */
FNIEMOP_DEF(iemOp_jl_Jv)
{
    IEMOP_MNEMONIC("jl/jnge Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3747
3748
/** Opcode 0x0f 0x8d - jnl/jge Jv: jump near if greater or equal (signed, SF == OF). */
FNIEMOP_DEF(iemOp_jnl_Jv)
{
    IEMOP_MNEMONIC("jnl/jge Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        /* Inverted condition: fall thru when SF != OF, jump otherwise. */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3783
3784
/** Opcode 0x0f 0x8e - jle/jng Jv: jump near if less or equal (signed, ZF=1 or SF != OF). */
FNIEMOP_DEF(iemOp_jle_Jv)
{
    IEMOP_MNEMONIC("jle/jng Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3819
3820
/** Opcode 0x0f 0x8f - jnle/jg Jv: jump near if greater (signed, ZF=0 and SF == OF). */
FNIEMOP_DEF(iemOp_jnle_Jv)
{
    IEMOP_MNEMONIC("jnle/jg Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        /* Inverted condition: fall thru when ZF=1 or SF != OF, jump otherwise. */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3855
3856
/** Opcode 0x0f 0x90 - seto Eb: set byte to 1 if overflow (OF=1), else 0. */
FNIEMOP_DEF(iemOp_seto_Eb)
{
    IEMOP_MNEMONIC("seto Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3896
3897
/** Opcode 0x0f 0x91 - setno Eb: set byte to 1 if not overflow (OF=0), else 0. */
FNIEMOP_DEF(iemOp_setno_Eb)
{
    IEMOP_MNEMONIC("setno Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target - note the inverted condition: OF set stores 0. */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3937
3938
/** Opcode 0x0f 0x92 - setc/setb/setnae Eb: set byte to 1 if carry (CF=1), else 0. */
FNIEMOP_DEF(iemOp_setc_Eb)
{
    IEMOP_MNEMONIC("setc Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3978
3979
/** Opcode 0x0f 0x93 - setnc/setnb/setae Eb: set byte to 1 if not carry (CF=0), else 0. */
FNIEMOP_DEF(iemOp_setnc_Eb)
{
    IEMOP_MNEMONIC("setnc Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target - note the inverted condition: CF set stores 0. */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4019
4020
/** Opcode 0x0f 0x94 - sete/setz Eb: set byte to 1 if equal/zero (ZF=1), else 0. */
FNIEMOP_DEF(iemOp_sete_Eb)
{
    IEMOP_MNEMONIC("sete Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4060
4061
/** Opcode 0x0f 0x95 - setne/setnz Eb: set byte to 1 if not equal/not zero (ZF=0), else 0. */
FNIEMOP_DEF(iemOp_setne_Eb)
{
    IEMOP_MNEMONIC("setne Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target - note the inverted condition: ZF set stores 0. */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4101
4102
/** Opcode 0x0f 0x96 - setbe/setna Eb: set byte to 1 if below or equal (CF=1 or ZF=1), else 0. */
FNIEMOP_DEF(iemOp_setbe_Eb)
{
    IEMOP_MNEMONIC("setbe Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4142
4143
/**
 * Opcode 0x0f 0x97 - SETNBE/SETA Eb.
 *
 * Sets the destination byte (register or memory) to 1 if both CF and ZF are
 * clear (above, unsigned), otherwise to 0.
 */
FNIEMOP_DEF(iemOp_setnbe_Eb)
{
    IEMOP_MNEMONIC("setnbe Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way.  AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4183
4184
/**
 * Opcode 0x0f 0x98 - SETS Eb.
 *
 * Sets the destination byte (register or memory) to 1 if SF is set,
 * otherwise to 0.
 */
FNIEMOP_DEF(iemOp_sets_Eb)
{
    IEMOP_MNEMONIC("sets Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way.  AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4224
4225
/**
 * Opcode 0x0f 0x99 - SETNS Eb.
 *
 * Sets the destination byte (register or memory) to 1 if SF is clear,
 * otherwise to 0.
 */
FNIEMOP_DEF(iemOp_setns_Eb)
{
    IEMOP_MNEMONIC("setns Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way.  AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4265
4266
4267/** Opcode 0x0f 0x9a. */
4268FNIEMOP_DEF(iemOp_setp_Eb)
4269{
4270 IEMOP_MNEMONIC("setnp Eb");
4271 IEMOP_HLP_MIN_386();
4272 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4273 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */
4274
4275 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4276 * any way. AMD says it's "unused", whatever that means. We're
4277 * ignoring for now. */
4278 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4279 {
4280 /* register target */
4281 IEM_MC_BEGIN(0, 0);
4282 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4283 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
4284 } IEM_MC_ELSE() {
4285 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
4286 } IEM_MC_ENDIF();
4287 IEM_MC_ADVANCE_RIP();
4288 IEM_MC_END();
4289 }
4290 else
4291 {
4292 /* memory target */
4293 IEM_MC_BEGIN(0, 1);
4294 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4295 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4296 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4297 IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
4298 } IEM_MC_ELSE() {
4299 IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
4300 } IEM_MC_ENDIF();
4301 IEM_MC_ADVANCE_RIP();
4302 IEM_MC_END();
4303 }
4304 return VINF_SUCCESS;
4305}
4306
4307
/**
 * Opcode 0x0f 0x9b - SETNP/SETPO Eb.
 *
 * Sets the destination byte (register or memory) to 1 if PF is clear,
 * otherwise to 0.
 */
FNIEMOP_DEF(iemOp_setnp_Eb)
{
    IEMOP_MNEMONIC("setnp Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way.  AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4347
4348
/**
 * Opcode 0x0f 0x9c - SETL/SETNGE Eb.
 *
 * Sets the destination byte (register or memory) to 1 if SF != OF (less,
 * signed), otherwise to 0.
 */
FNIEMOP_DEF(iemOp_setl_Eb)
{
    IEMOP_MNEMONIC("setl Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way.  AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4388
4389
/**
 * Opcode 0x0f 0x9d - SETNL/SETGE Eb.
 *
 * Sets the destination byte (register or memory) to 1 if SF == OF (greater
 * or equal, signed), otherwise to 0.
 */
FNIEMOP_DEF(iemOp_setnl_Eb)
{
    IEMOP_MNEMONIC("setnl Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way.  AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4429
4430
/**
 * Opcode 0x0f 0x9e - SETLE/SETNG Eb.
 *
 * Sets the destination byte (register or memory) to 1 if ZF is set or
 * SF != OF (less or equal, signed), otherwise to 0.
 */
FNIEMOP_DEF(iemOp_setle_Eb)
{
    IEMOP_MNEMONIC("setle Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way.  AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4470
4471
/**
 * Opcode 0x0f 0x9f - SETNLE/SETG Eb.
 *
 * Sets the destination byte (register or memory) to 1 if ZF is clear and
 * SF == OF (greater, signed), otherwise to 0.
 */
FNIEMOP_DEF(iemOp_setnle_Eb)
{
    IEMOP_MNEMONIC("setnle Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way.  AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4511
4512
/**
 * Common 'push segment-register' helper.
 *
 * @param   iReg    The segment register to push (X86_SREG_XXX).
 *
 * @note    Pushing ES/CS/SS/DS is invalid in 64-bit mode, hence the
 *          IEMOP_HLP_NO_64BIT for registers below FS.
 */
FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (iReg < X86_SREG_FS)
        IEMOP_HLP_NO_64BIT();
    /* In 64-bit mode the push defaults to a 64-bit operand size. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Value);
            IEM_MC_FETCH_SREG_U16(u16Value, iReg);
            IEM_MC_PUSH_U16(u16Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
            /* Special SREG push variant; presumably models how real CPUs
               only write the low word of the stack slot - TODO confirm. */
            IEM_MC_PUSH_U32_SREG(u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
            IEM_MC_PUSH_U64(u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
4555
4556
/** Opcode 0x0f 0xa0 - PUSH FS. */
FNIEMOP_DEF(iemOp_push_fs)
{
    IEMOP_MNEMONIC("push fs");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_NO_LOCK_PREFIX();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
}
4565
4566
/** Opcode 0x0f 0xa1 - POP FS. Deferred to the C implementation (segment
 *  loading may raise faults and has mode-dependent side effects). */
FNIEMOP_DEF(iemOp_pop_fs)
{
    IEMOP_MNEMONIC("pop fs");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pIemCpu->enmEffOpSize);
}
4575
4576
/** Opcode 0x0f 0xa2 - CPUID. Deferred to the C implementation. */
FNIEMOP_DEF(iemOp_cpuid)
{
    IEMOP_MNEMONIC("cpuid");
    IEMOP_HLP_MIN_486(); /* not all 486es. */
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
}
4585
4586
4587/**
4588 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
4589 * iemOp_bts_Ev_Gv.
4590 */
4591FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
4592{
4593 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4594 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
4595
4596 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4597 {
4598 /* register destination. */
4599 IEMOP_HLP_NO_LOCK_PREFIX();
4600 switch (pIemCpu->enmEffOpSize)
4601 {
4602 case IEMMODE_16BIT:
4603 IEM_MC_BEGIN(3, 0);
4604 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
4605 IEM_MC_ARG(uint16_t, u16Src, 1);
4606 IEM_MC_ARG(uint32_t *, pEFlags, 2);
4607
4608 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4609 IEM_MC_AND_LOCAL_U16(u16Src, 0xf);
4610 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
4611 IEM_MC_REF_EFLAGS(pEFlags);
4612 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
4613
4614 IEM_MC_ADVANCE_RIP();
4615 IEM_MC_END();
4616 return VINF_SUCCESS;
4617
4618 case IEMMODE_32BIT:
4619 IEM_MC_BEGIN(3, 0);
4620 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
4621 IEM_MC_ARG(uint32_t, u32Src, 1);
4622 IEM_MC_ARG(uint32_t *, pEFlags, 2);
4623
4624 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4625 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
4626 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
4627 IEM_MC_REF_EFLAGS(pEFlags);
4628 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
4629
4630 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
4631 IEM_MC_ADVANCE_RIP();
4632 IEM_MC_END();
4633 return VINF_SUCCESS;
4634
4635 case IEMMODE_64BIT:
4636 IEM_MC_BEGIN(3, 0);
4637 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
4638 IEM_MC_ARG(uint64_t, u64Src, 1);
4639 IEM_MC_ARG(uint32_t *, pEFlags, 2);
4640
4641 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4642 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
4643 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
4644 IEM_MC_REF_EFLAGS(pEFlags);
4645 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
4646
4647 IEM_MC_ADVANCE_RIP();
4648 IEM_MC_END();
4649 return VINF_SUCCESS;
4650
4651 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4652 }
4653 }
4654 else
4655 {
4656 /* memory destination. */
4657
4658 uint32_t fAccess;
4659 if (pImpl->pfnLockedU16)
4660 fAccess = IEM_ACCESS_DATA_RW;
4661 else /* BT */
4662 {
4663 IEMOP_HLP_NO_LOCK_PREFIX();
4664 fAccess = IEM_ACCESS_DATA_R;
4665 }
4666
4667 NOREF(fAccess);
4668
4669 /** @todo test negative bit offsets! */
4670 switch (pIemCpu->enmEffOpSize)
4671 {
4672 case IEMMODE_16BIT:
4673 IEM_MC_BEGIN(3, 2);
4674 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
4675 IEM_MC_ARG(uint16_t, u16Src, 1);
4676 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
4677 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4678 IEM_MC_LOCAL(int16_t, i16AddrAdj);
4679
4680 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4681 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4682 IEM_MC_ASSIGN(i16AddrAdj, u16Src);
4683 IEM_MC_AND_ARG_U16(u16Src, 0x0f);
4684 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
4685 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 1);
4686 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
4687 IEM_MC_FETCH_EFLAGS(EFlags);
4688
4689 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
4690 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
4691 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
4692 else
4693 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
4694 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
4695
4696 IEM_MC_COMMIT_EFLAGS(EFlags);
4697 IEM_MC_ADVANCE_RIP();
4698 IEM_MC_END();
4699 return VINF_SUCCESS;
4700
4701 case IEMMODE_32BIT:
4702 IEM_MC_BEGIN(3, 2);
4703 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
4704 IEM_MC_ARG(uint32_t, u32Src, 1);
4705 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
4706 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4707 IEM_MC_LOCAL(int32_t, i32AddrAdj);
4708
4709 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4710 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4711 IEM_MC_ASSIGN(i32AddrAdj, u32Src);
4712 IEM_MC_AND_ARG_U32(u32Src, 0x1f);
4713 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
4714 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
4715 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
4716 IEM_MC_FETCH_EFLAGS(EFlags);
4717
4718 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
4719 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
4720 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
4721 else
4722 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
4723 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
4724
4725 IEM_MC_COMMIT_EFLAGS(EFlags);
4726 IEM_MC_ADVANCE_RIP();
4727 IEM_MC_END();
4728 return VINF_SUCCESS;
4729
4730 case IEMMODE_64BIT:
4731 IEM_MC_BEGIN(3, 2);
4732 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
4733 IEM_MC_ARG(uint64_t, u64Src, 1);
4734 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
4735 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4736 IEM_MC_LOCAL(int64_t, i64AddrAdj);
4737
4738 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4739 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4740 IEM_MC_ASSIGN(i64AddrAdj, u64Src);
4741 IEM_MC_AND_ARG_U64(u64Src, 0x3f);
4742 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
4743 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
4744 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
4745 IEM_MC_FETCH_EFLAGS(EFlags);
4746
4747 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
4748 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
4749 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
4750 else
4751 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
4752 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
4753
4754 IEM_MC_COMMIT_EFLAGS(EFlags);
4755 IEM_MC_ADVANCE_RIP();
4756 IEM_MC_END();
4757 return VINF_SUCCESS;
4758
4759 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4760 }
4761 }
4762}
4763
4764
4765/** Opcode 0x0f 0xa3. */
4766FNIEMOP_DEF(iemOp_bt_Ev_Gv)
4767{
4768 IEMOP_MNEMONIC("bt Gv,Gv");
4769 IEMOP_HLP_MIN_386();
4770 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
4771}
4772
4773
/**
 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib
 * (SHLD/SHRD Ev,Gv,Ib - double precision shift with immediate count).
 *
 * @param   pImpl   Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* AF and OF are left undefined by these instructions. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* The immediate count follows directly after ModR/M here. */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEMOP_HLP_NO_LOCK_PREFIX();

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* cbImm=1: one immediate byte (the count) follows the
                   addressing bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
4918
4919
/**
 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL
 * (SHLD/SHRD Ev,Gv,CL - double precision shift with count in CL).
 *
 * @param   pImpl   Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* AF and OF are left undefined by these instructions. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX();

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* shift count from CL */
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
5063
5064
5065
/** Opcode 0x0f 0xa4. */
FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
{
    /* Double-precision shift left, immediate shift count; 386+ only.
       Decoding of the modrm byte and Ib is done by the common worker. */
    IEMOP_MNEMONIC("shld Ev,Gv,Ib");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shld);
}
5073
5074
/** Opcode 0x0f 0xa5. */
FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
{
    /* Double-precision shift left, shift count taken from CL; 386+ only. */
    IEMOP_MNEMONIC("shld Ev,Gv,CL");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shld);
}
5082
5083
/** Opcode 0x0f 0xa8. */
FNIEMOP_DEF(iemOp_push_gs)
{
    /* Push the GS segment register; GS exists from the 386 on. */
    IEMOP_MNEMONIC("push gs");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_NO_LOCK_PREFIX();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
}
5092
5093
/** Opcode 0x0f 0xa9. */
FNIEMOP_DEF(iemOp_pop_gs)
{
    /* Pop into the GS segment register.  Segment loads can fault and change
       execution context, so this defers to the C implementation. */
    IEMOP_MNEMONIC("pop gs");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pIemCpu->enmEffOpSize);
}
5102
5103
/** Opcode 0x0f 0xaa. */
/* RSM (resume from system management mode) is not implemented yet; the
   commented-out line below records the intended minimum CPU check. */
FNIEMOP_STUB(iemOp_rsm);
//IEMOP_HLP_MIN_386();
5107
5108
/** Opcode 0x0f 0xab. */
FNIEMOP_DEF(iemOp_bts_Ev_Gv)
{
    /* Bit test and set; shares decode/exec logic with bt/btr/btc. */
    IEMOP_MNEMONIC("bts Ev,Gv");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
}
5116
5117
/** Opcode 0x0f 0xac. */
FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
{
    /* Double-precision shift right, immediate shift count; 386+ only. */
    IEMOP_MNEMONIC("shrd Ev,Gv,Ib");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shrd);
}
5125
5126
/** Opcode 0x0f 0xad. */
FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
{
    /* Double-precision shift right, shift count taken from CL; 386+ only. */
    IEMOP_MNEMONIC("shrd Ev,Gv,CL");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shrd);
}
5134
5135
/** Opcode 0x0f 0xae mem/0. */
FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
{
    /*
     * FXSAVE m512: save FPU/MMX/SSE state to a 512-byte memory area.
     * Raises #UD when the guest CPU profile lacks FXSAVE/FXRSTOR support;
     * the heavy lifting (alignment/state checks, the actual save) is in
     * iemCImpl_fxsave.
     */
    IEMOP_MNEMONIC("fxsave m512");
    if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fFxSaveRstor)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t,         iEffSeg,                                 0);
    IEM_MC_ARG(RTGCPTR,         GCPtrEff,                                1);
    IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
    IEM_MC_END();
    return VINF_SUCCESS;
}
5154
5155
/** Opcode 0x0f 0xae mem/1. */
FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
{
    /*
     * FXRSTOR m512: restore FPU/MMX/SSE state from a 512-byte memory area.
     * Mirrors iemOp_Grp15_fxsave above; #UD without FXSAVE/FXRSTOR support,
     * and the real work happens in iemCImpl_fxrstor.
     */
    IEMOP_MNEMONIC("fxrstor m512");
    if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fFxSaveRstor)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t,         iEffSeg,                                 0);
    IEM_MC_ARG(RTGCPTR,         GCPtrEff,                                1);
    IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
    IEM_MC_END();
    return VINF_SUCCESS;
}
5174
5175
/* Unimplemented memory forms of group 15.  FNIEMOP_STUB_1 marks not-yet-done
   instructions, while FNIEMOP_UD_STUB_1 presumably decodes to #UD — confirm
   against the macro definitions in the IEM internals header. */

/** Opcode 0x0f 0xae mem/2. */
FNIEMOP_STUB_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/3. */
FNIEMOP_STUB_1(iemOp_Grp15_stmxcsr, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/4. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_xsave, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/5. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_xrstor, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/6. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/7. */
FNIEMOP_STUB_1(iemOp_Grp15_clflush, uint8_t, bRm);
5193
5194
/** Opcode 0x0f 0xae 11b/5. */
FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
{
    /*
     * LFENCE: load fence.  #UD when the guest lacks SSE2.  If the host CPU
     * has SSE2 we emit a real lfence; otherwise fall back to the generic
     * alternative memory fence helper.
     */
    IEMOP_MNEMONIC("lfence");
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fSse2)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(0, 0);
    if (IEM_GET_HOST_CPU_FEATURES(pIemCpu)->fSse2)
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
5212
5213
/** Opcode 0x0f 0xae 11b/6. */
FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
{
    /*
     * MFENCE: full memory fence.  Same guest-SSE2 #UD gate and host-SSE2
     * fallback pattern as lfence above.
     */
    IEMOP_MNEMONIC("mfence");
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fSse2)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(0, 0);
    if (IEM_GET_HOST_CPU_FEATURES(pIemCpu)->fSse2)
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
5231
5232
/** Opcode 0x0f 0xae 11b/7. */
FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
{
    /*
     * SFENCE: store fence.  Same guest-SSE2 #UD gate and host-SSE2 fallback
     * pattern as lfence/mfence above.  (Note: the check is against SSE2 here
     * even though SFENCE itself arrived with SSE — presumably intentional for
     * simplicity; verify if stricter CPU profiling is needed.)
     */
    IEMOP_MNEMONIC("sfence");
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fSse2)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(0, 0);
    if (IEM_GET_HOST_CPU_FEATURES(pIemCpu)->fSse2)
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
5250
5251
/* F3-prefixed register forms of group 15 (FSGSBASE instruction family),
   currently unimplemented UD stubs. */

/** Opcode 0xf3 0x0f 0xae 11b/0. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_rdfsbase, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xae 11b/1. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_rdgsbase, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xae 11b/2. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_wrfsbase, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xae 11b/3. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_wrgsbase, uint8_t, bRm);
5263
5264
/** Opcode 0x0f 0xae. */
FNIEMOP_DEF(iemOp_Grp15)
{
    /*
     * Group 15 dispatcher.  The memory forms (mod != 3) select on the reg
     * field only; the register forms (mod == 3) additionally select on the
     * repz/repnz/opsize/lock prefixes, with only no-prefix (fences) and
     * F3 (fs/gs base) currently decoded.
     */
    IEMOP_HLP_MIN_586(); /* Not entirely accurate nor needed, but useful for debugging 286 code. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Memory operand forms. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_Grp15_fxsave,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_Grp15_fxrstor, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_Grp15_ldmxcsr, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_Grp15_stmxcsr, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_Grp15_xsave,   bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_Grp15_xrstor,  bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_Grp15_xsaveopt,bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_Grp15_clflush, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Register operand forms, further selected by prefix bytes. */
        switch (pIemCpu->fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_LOCK))
        {
            case 0: /* no prefixes: the fence instructions. */
                switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
                {
                    case 0: return IEMOP_RAISE_INVALID_OPCODE();
                    case 1: return IEMOP_RAISE_INVALID_OPCODE();
                    case 2: return IEMOP_RAISE_INVALID_OPCODE();
                    case 3: return IEMOP_RAISE_INVALID_OPCODE();
                    case 4: return IEMOP_RAISE_INVALID_OPCODE();
                    case 5: return FNIEMOP_CALL_1(iemOp_Grp15_lfence, bRm);
                    case 6: return FNIEMOP_CALL_1(iemOp_Grp15_mfence, bRm);
                    case 7: return FNIEMOP_CALL_1(iemOp_Grp15_sfence, bRm);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reached: every case above returns. */

            case IEM_OP_PRF_REPZ: /* F3 prefix: fs/gs base access. */
                switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
                {
                    case 0: return FNIEMOP_CALL_1(iemOp_Grp15_rdfsbase, bRm);
                    case 1: return FNIEMOP_CALL_1(iemOp_Grp15_rdgsbase, bRm);
                    case 2: return FNIEMOP_CALL_1(iemOp_Grp15_wrfsbase, bRm);
                    case 3: return FNIEMOP_CALL_1(iemOp_Grp15_wrgsbase, bRm);
                    case 4: return IEMOP_RAISE_INVALID_OPCODE();
                    case 5: return IEMOP_RAISE_INVALID_OPCODE();
                    case 6: return IEMOP_RAISE_INVALID_OPCODE();
                    case 7: return IEMOP_RAISE_INVALID_OPCODE();
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reached: every case above returns. */

            default:
                return IEMOP_RAISE_INVALID_OPCODE();
        }
    }
}
5324
5325
/** Opcode 0x0f 0xaf. */
FNIEMOP_DEF(iemOp_imul_Gv_Ev)
{
    /* Two-operand IMUL; SF/ZF/AF/PF are architecturally undefined after it,
       so tell the verifier not to compare them. */
    IEMOP_MNEMONIC("imul Gv,Ev");
    IEMOP_HLP_MIN_386();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_imul_two);
}
5334
5335
/** Opcode 0x0f 0xb0. */
FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
{
    /*
     * CMPXCHG r/m8, r8: compare AL with the destination; on match store the
     * source there, otherwise load the destination into AL (the assembly
     * helper updates the AL reference either way).  486+ only.  The LOCK
     * prefix selects the locked helper variant.
     */
    IEMOP_MNEMONIC("cmpxchg Eb,Gb");
    IEMOP_HLP_MIN_486();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register destination: operate directly on register references. */
        IEMOP_HLP_DONE_DECODING();
        IEM_MC_BEGIN(4, 0);
        IEM_MC_ARG(uint8_t *,       pu8Dst,                 0);
        IEM_MC_ARG(uint8_t *,       pu8Al,                  1);
        IEM_MC_ARG(uint8_t,         u8Src,                  2);
        IEM_MC_ARG(uint32_t *,      pEFlags,                3);

        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* Memory destination: map the byte RW, use a local copy of AL and
           write it back to the register after commit. */
        IEM_MC_BEGIN(4, 3);
        IEM_MC_ARG(uint8_t *,       pu8Dst,                 0);
        IEM_MC_ARG(uint8_t *,       pu8Al,                  1);
        IEM_MC_ARG(uint8_t,         u8Src,                  2);
        IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        3);
        IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
        IEM_MC_LOCAL(uint8_t,       u8Al);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING();
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_REF_LOCAL(pu8Al, u8Al);
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5394
/** Opcode 0x0f 0xb1. */
FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
{
    /*
     * CMPXCHG r/m16/32/64, r16/32/64: word/dword/qword variant of the byte
     * form above, one case per effective operand size for both the register
     * and memory destination paths.  On 32-bit hosts (RT_ARCH_X86) the
     * 64-bit source is passed by reference instead of by value.
     */
    IEMOP_MNEMONIC("cmpxchg Ev,Gv");
    IEMOP_HLP_MIN_486();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register destination. */
        IEMOP_HLP_DONE_DECODING();
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                0);
                IEM_MC_ARG(uint16_t *,      pu16Ax,                 1);
                IEM_MC_ARG(uint16_t,        u16Src,                 2);
                IEM_MC_ARG(uint32_t *,      pEFlags,                3);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                0);
                IEM_MC_ARG(uint32_t *,      pu32Eax,                1);
                IEM_MC_ARG(uint32_t,        u32Src,                 2);
                IEM_MC_ARG(uint32_t *,      pEFlags,                3);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);

                /* 32-bit GPR writes clear the upper halves; done explicitly
                   here since the helper wrote through references. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                0);
                IEM_MC_ARG(uint64_t *,      pu64Rax,                1);
#ifdef RT_ARCH_X86
                IEM_MC_ARG(uint64_t *,      pu64Src,                2);
#else
                IEM_MC_ARG(uint64_t,        u64Src,                 2);
#endif
                IEM_MC_ARG(uint32_t *,      pEFlags,                3);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
#ifdef RT_ARCH_X86
                IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
#else
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
#endif

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory destination: map RW, use a local rAX copy, write the
           (possibly updated) accumulator back after commit. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                0);
                IEM_MC_ARG(uint16_t *,      pu16Ax,                 1);
                IEM_MC_ARG(uint16_t,        u16Src,                 2);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        3);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
                IEM_MC_LOCAL(uint16_t,      u16Ax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                0);
                IEM_MC_ARG(uint32_t *,      pu32Eax,                1);
                IEM_MC_ARG(uint32_t,        u32Src,                 2);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        3);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
                IEM_MC_LOCAL(uint32_t,      u32Eax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax); /* 32-bit store zero-extends. */
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                0);
                IEM_MC_ARG(uint64_t *,      pu64Rax,                1);
#ifdef RT_ARCH_X86
                IEM_MC_ARG(uint64_t *,      pu64Src,                2);
#else
                IEM_MC_ARG(uint64_t,        u64Src,                 2);
#endif
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        3);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
                IEM_MC_LOCAL(uint64_t,      u64Rax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
#ifdef RT_ARCH_X86
                IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
#else
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
#endif

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
5588
5589
/**
 * Common worker for LSS/LFS/LGS: load a far pointer (offset + 16-bit
 * selector) from memory into a general register and the given segment
 * register.  The offset is read first, then the selector at the following
 * displacement (2/4/8 bytes depending on operand size); the actual segment
 * load is deferred to iemCImpl_load_SReg_Greg since it can fault.
 *
 * @param   iSegReg     The segment register to load (X86_SREG_SS/FS/GS).
 * @param   bRm         The ModR/M byte; must denote a memory operand.
 */
FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
{
    Assert((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)); /* Caller checks this */
    uint8_t const iGReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg;

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t,        uSel,                                    0);
            IEM_MC_ARG(uint16_t,        offSeg,                                  1);
            IEM_MC_ARG_CONST(uint8_t,   iSegRegArg,/*=*/iSegReg,                 2);
            IEM_MC_ARG_CONST(uint8_t,   iGRegArg,  /*=*/iGReg,                   3);
            IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR,       GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16(offSeg, pIemCpu->iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pIemCpu->iEffSeg, GCPtrEff, 2);
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t,        uSel,                                    0);
            IEM_MC_ARG(uint32_t,        offSeg,                                  1);
            IEM_MC_ARG_CONST(uint8_t,   iSegRegArg,/*=*/iSegReg,                 2);
            IEM_MC_ARG_CONST(uint8_t,   iGRegArg,  /*=*/iGReg,                   3);
            IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR,       GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32(offSeg, pIemCpu->iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pIemCpu->iEffSeg, GCPtrEff, 4);
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t,        uSel,                                    0);
            IEM_MC_ARG(uint64_t,        offSeg,                                  1);
            IEM_MC_ARG_CONST(uint8_t,   iSegRegArg,/*=*/iSegReg,                 2);
            IEM_MC_ARG_CONST(uint8_t,   iGRegArg,  /*=*/iGReg,                   3);
            IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR,       GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            if (IEM_IS_GUEST_CPU_AMD(pIemCpu)) /** @todo testcase: rev 3.15 of the amd manuals claims it only loads a 32-bit greg. */
                IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pIemCpu->iEffSeg, GCPtrEff);
            else
                IEM_MC_FETCH_MEM_U64(offSeg, pIemCpu->iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pIemCpu->iEffSeg, GCPtrEff, 8);
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
5651
5652
/** Opcode 0x0f 0xb2. */
FNIEMOP_DEF(iemOp_lss_Gv_Mp)
{
    /* LSS: load far pointer into SS:reg.  Register operands are invalid. */
    IEMOP_MNEMONIC("lss Gv,Mp");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE();
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
}
5663
5664
5665/** Opcode 0x0f 0xb3. */
5666FNIEMOP_DEF(iemOp_btr_Ev_Gv)
5667{
5668 IEMOP_MNEMONIC("btr Ev,Gv");
5669 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
5670}
5671
5672
/** Opcode 0x0f 0xb4. */
FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
{
    /* LFS: load far pointer into FS:reg.  Register operands are invalid. */
    IEMOP_MNEMONIC("lfs Gv,Mp");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE();
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
}
5683
5684
/** Opcode 0x0f 0xb5. */
FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
{
    /* LGS: load far pointer into GS:reg.  Register operands are invalid. */
    IEMOP_MNEMONIC("lgs Gv,Mp");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE();
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
}
5695
5696
/** Opcode 0x0f 0xb6. */
FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
{
    /*
     * MOVZX Gv, Eb: zero-extend an 8-bit register or memory operand into a
     * 16/32/64-bit general register, one case per effective operand size.
     */
    IEMOP_MNEMONIC("movzx Gv,Eb");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
5787
5788
/** Opcode 0x0f 0xb7. */
FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
{
    /*
     * MOVZX Gv, Ew: zero-extend a 16-bit register or memory operand into a
     * 32/64-bit general register.  Only two destination sizes are possible:
     * 64-bit with REX.W, else 32-bit (a 16-bit destination would make it a
     * plain move, which is why the 16BIT case collapses into 32BIT below).
     */
    IEMOP_MNEMONIC("movzx Gv,Ew");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /** @todo Not entirely sure how the operand size prefix is handled here,
     *        assuming that it will be ignored. Would be nice to have a few
     *        test for this. */
    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}
5855
5856
/** Opcode 0x0f 0xb8. */
/* POPCNT (F3 prefix) / JMPE (IA-64); not implemented yet. */
FNIEMOP_STUB(iemOp_popcnt_Gv_Ev_jmpe);
5859
5860
/** Opcode 0x0f 0xb9. */
FNIEMOP_DEF(iemOp_Grp10)
{
    /* Group 10 (UD1): always raises #UD; logged for diagnostics. */
    Log(("iemOp_Grp10 -> #UD\n"));
    return IEMOP_RAISE_INVALID_OPCODE();
}
5867
5868
/** Opcode 0x0f 0xba. */
FNIEMOP_DEF(iemOp_Grp8)
{
    /*
     * Group 8: BT/BTS/BTR/BTC with an immediate bit offset (Ev,Ib).
     * The reg field selects the operation (0-3 are invalid); the bit
     * offset immediate is masked to the operand width (0x0f/0x1f/0x3f).
     * OF/SF/ZF/AF/PF are architecturally undefined after these.
     */
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPBINSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: case 1: case 2: case 3:
            return IEMOP_RAISE_INVALID_OPCODE();
        case 4: pImpl = &g_iemAImpl_bt;  IEMOP_MNEMONIC("bt Ev,Ib");  break;
        case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC("bts Ev,Ib"); break;
        case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC("btr Ev,Ib"); break;
        case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC("btc Ev,Ib"); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register destination. */
        uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
        IEMOP_HLP_NO_LOCK_PREFIX();

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                    0);
                IEM_MC_ARG_CONST(uint16_t,  u16Src, /*=*/ u8Bit & 0x0f, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                    0);
                IEM_MC_ARG_CONST(uint32_t,  u32Src, /*=*/ u8Bit & 0x1f, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                /* 32-bit GPR writes clear the upper half; done explicitly
                   since the helper wrote through a reference. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                    0);
                IEM_MC_ARG_CONST(uint64_t,  u64Src, /*=*/ u8Bit & 0x3f, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory destination. */

        /* BT only reads, so LOCK is invalid there and the mapping is R;
           the modifying forms (which provide a locked helper) map RW. */
        uint32_t fAccess;
        if (pImpl->pfnLockedU16)
            fAccess = IEM_ACCESS_DATA_RW;
        else /* BT */
        {
            IEMOP_HLP_NO_LOCK_PREFIX();
            fAccess = IEM_ACCESS_DATA_R;
        }

        /** @todo test negative bit offsets! */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *,              pu16Dst,                0);
                IEM_MC_ARG(uint16_t,                u16Src,                 1);
                IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags,        2);
                IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);

                /* Note: cbImm=1 in the effective address calc accounts for
                   the immediate byte that is fetched after it. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
                IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *,              pu32Dst,                0);
                IEM_MC_ARG(uint32_t,                u32Src,                 1);
                IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags,        2);
                IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
                IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *,              pu64Dst,                0);
                IEM_MC_ARG(uint64_t,                u64Src,                 1);
                IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags,        2);
                IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
                IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

}
6031
6032
/** Opcode 0x0f 0xbb. */
FNIEMOP_DEF(iemOp_btc_Ev_Gv)
{
    /* BTC Ev,Gv - bit test and complement; 386+. Delegates decoding and
       emulation to the common bit-op worker with the BTC function table. */
    IEMOP_MNEMONIC("btc Ev,Gv");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
}
6040
6041
/** Opcode 0x0f 0xbc. */
FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
{
    /* BSF Gv,Ev - bit scan forward; 386+. OF/SF/AF/PF/CF are architecturally
       undefined after BSF, so tell the verifier to ignore them. */
    IEMOP_MNEMONIC("bsf Gv,Ev");
    IEMOP_HLP_MIN_386();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsf);
}
6050
6051
/** Opcode 0x0f 0xbd. */
FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
{
    /* BSR Gv,Ev - bit scan reverse; 386+. Same undefined-flags treatment as BSF. */
    IEMOP_MNEMONIC("bsr Gv,Ev");
    IEMOP_HLP_MIN_386();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsr);
}
6060
6061
/** Opcode 0x0f 0xbe. */
FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
{
    /* MOVSX Gv,Eb - sign-extend a byte source (register or memory) into a
       16/32/64-bit destination GPR, selected by the effective operand size. */
    IEMOP_MNEMONIC("movsx Gv,Eb");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6152
6153
/** Opcode 0x0f 0xbf. */
FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
{
    /* MOVSX Gv,Ew - sign-extend a word source (register or memory) into a
       32-bit (16/32-bit operand size) or 64-bit destination GPR. */
    IEMOP_MNEMONIC("movsx Gv,Ew");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /** @todo Not entirely sure how the operand size prefix is handled here,
     *        assuming that it will be ignored. Would be nice to have a few
     *        test for this. */
    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}
6220
6221
/** Opcode 0x0f 0xc0. */
FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
{
    /* XADD Eb,Gb - exchange and add, byte form; 486+. The memory form honors
       the LOCK prefix by dispatching to the locked assembly worker. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_MIN_486();
    IEMOP_MNEMONIC("xadd Eb,Gb");

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t *, pu8Reg, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
        IEM_MC_BEGIN(3, 3);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t *, pu8Reg, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(uint8_t, u8RegCopy);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        /* The worker exchanges through pu8Reg, so operate on a copy of the
           guest register and write the (old destination) value back afterwards. */
        IEM_MC_FETCH_GREG_U8(u8RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
        IEM_MC_FETCH_EFLAGS(EFlags);
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u8RegCopy);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }
    return VINF_SUCCESS;
}
6280
6281
/** Opcode 0x0f 0xc1. */
FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
{
    /* XADD Ev,Gv - exchange and add for 16/32/64-bit operands; 486+. The
       memory form honors the LOCK prefix via the locked assembly workers. */
    IEMOP_MNEMONIC("xadd Ev,Gv");
    IEMOP_HLP_MIN_486();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX();

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Reg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Reg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);

                /* 32-bit register writes zero bits 63:32 of both GPRs involved. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Reg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Reg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16RegCopy);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                /* Work on a copy of the register; the worker swaps the old
                   destination value into it, stored back below. */
                IEM_MC_FETCH_GREG_U16(u16RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Reg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32RegCopy);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U32(u32RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Reg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64RegCopy);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U64(u64RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6434
/* Unimplemented handlers: FNIEMOP_STUB declares a placeholder for an opcode
   that has not been implemented yet (stub body supplied by the macro). */

/** Opcode 0x0f 0xc2. */
FNIEMOP_STUB(iemOp_cmpps_Vps_Wps_Ib__cmppd_Vpd_Wpd_Ib__cmpss_Vss_Wss_Ib__cmpsd_Vsd_Wsd_Ib);

/** Opcode 0x0f 0xc3. */
FNIEMOP_STUB(iemOp_movnti_My_Gy); // solaris 10 uses this in hat_pte_zero().

/** Opcode 0x0f 0xc4. */
FNIEMOP_STUB(iemOp_pinsrw_Pq_Ry_Mw_Ib__pinsrw_Vdq_Ry_Mw_Ib);

/** Opcode 0x0f 0xc5. */
FNIEMOP_STUB(iemOp_pextrw_Gd_Nq_Ib__pextrw_Gd_Udq_Ib);

/** Opcode 0x0f 0xc6. */
FNIEMOP_STUB(iemOp_shufps_Vps_Wps_Ib__shufdp_Vpd_Wpd_Ib);
6449
6450
/** Opcode 0x0f 0xc7 !11/1. */
FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
{
    /* CMPXCHG8B m64: compare EDX:EAX with the memory operand; on match store
       ECX:EBX and set ZF, otherwise load the operand into EDX:EAX and clear ZF.
       LOCK prefix selects the locked assembly worker. */
    IEMOP_MNEMONIC("cmpxchg8b Mq");

    IEM_MC_BEGIN(4, 3);
    IEM_MC_ARG(uint64_t *, pu64MemDst, 0);
    IEM_MC_ARG(PRTUINT64U, pu64EaxEdx, 1);
    IEM_MC_ARG(PRTUINT64U, pu64EbxEcx, 2);
    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
    IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
    IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);

    /* Assemble the EDX:EAX comparand from the 32-bit guest registers. */
    IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
    IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
    IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);

    /* Assemble the ECX:EBX replacement value. */
    IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
    IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
    IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);

    IEM_MC_FETCH_EFLAGS(EFlags);
    if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
    else
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);

    IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
    IEM_MC_COMMIT_EFLAGS(EFlags);
    /* Only write EAX/EDX back when the compare failed (ZF clear). */
    IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
        /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */
        IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
        IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
6495
6496
/* Group 9 sub-opcodes that are not implemented yet; FNIEMOP_UD_STUB_1
   presumably makes these raise an invalid-opcode (#UD) - see macro definition. */

/** Opcode REX.W 0x0f 0xc7 !11/1. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm);

/** Opcode 0x0f 0xc7 11/6. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm);

/** Opcode 0x0f 0xc7 !11/6. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);

/** Opcode 0x66 0x0f 0xc7 !11/6. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xc7 !11/6. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);

/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
6514
6515
6516/** Opcode 0x0f 0xc7. */
6517FNIEMOP_DEF(iemOp_Grp9)
6518{
6519 /** @todo Testcase: Check mixing 0x66 and 0xf3. Check the effect of 0xf2. */
6520 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6521 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
6522 {
6523 case 0: case 2: case 3: case 4: case 5:
6524 return IEMOP_RAISE_INVALID_OPCODE();
6525 case 1:
6526 /** @todo Testcase: Check prefix effects on cmpxchg8b/16b. */
6527 if ( (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)
6528 || (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))) /** @todo Testcase: AMD seems to express a different idea here wrt prefixes. */
6529 return IEMOP_RAISE_INVALID_OPCODE();
6530 if (bRm & IEM_OP_PRF_SIZE_REX_W)
6531 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
6532 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
6533 case 6:
6534 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6535 return FNIEMOP_CALL_1(iemOp_Grp9_rdrand_Rv, bRm);
6536 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))
6537 {
6538 case 0:
6539 return FNIEMOP_CALL_1(iemOp_Grp9_vmptrld_Mq, bRm);
6540 case IEM_OP_PRF_SIZE_OP:
6541 return FNIEMOP_CALL_1(iemOp_Grp9_vmclear_Mq, bRm);
6542 case IEM_OP_PRF_REPZ:
6543 return FNIEMOP_CALL_1(iemOp_Grp9_vmxon_Mq, bRm);
6544 default:
6545 return IEMOP_RAISE_INVALID_OPCODE();
6546 }
6547 case 7:
6548 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))
6549 {
6550 case 0:
6551 case IEM_OP_PRF_REPZ:
6552 return FNIEMOP_CALL_1(iemOp_Grp9_vmptrst_Mq, bRm);
6553 default:
6554 return IEMOP_RAISE_INVALID_OPCODE();
6555 }
6556 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6557 }
6558}
6559
6560
/**
 * Common 'bswap register' helper.
 *
 * Byte-swaps the general register @a iReg according to the effective operand
 * size.  Shared by all the 0x0f 0xc8..0xcf handlers.
 */
FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            /* 16-bit BSWAP uses the 16-bit assembly worker on a 32-bit
               register reference; high dword deliberately left untouched. */
            IEM_MC_BEGIN(1, 0);
            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);     /* Don't clear the high dword! */
            IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(1, 0);
            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
            IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(1, 0);
            IEM_MC_ARG(uint64_t *, pu64Dst, 0);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6600
6601
/** Opcode 0x0f 0xc8. */
FNIEMOP_DEF(iemOp_bswap_rAX_r8)
{
    IEMOP_MNEMONIC("bswap rAX/r8");
    /* Note! Intel manuals states that R8-R15 can be accessed by using a REX.X
             prefix.  REX.B is the correct prefix it appears.  For a parallel
             case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pIemCpu->uRexB);
}
6612
6613
/** Opcode 0x0f 0xc9. */
FNIEMOP_DEF(iemOp_bswap_rCX_r9)
{
    /* BSWAP rCX/r9 - REX.B selects r9; see note in iemOp_bswap_rAX_r8. */
    IEMOP_MNEMONIC("bswap rCX/r9");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pIemCpu->uRexB);
}
6621
6622
6623/** Opcode 0x0f 0xca. */
6624FNIEMOP_DEF(iemOp_bswap_rDX_r10)
6625{
6626 IEMOP_MNEMONIC("bswap rDX/r9");
6627 IEMOP_HLP_MIN_486();
6628 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pIemCpu->uRexB);
6629}
6630
6631
6632/** Opcode 0x0f 0xcb. */
6633FNIEMOP_DEF(iemOp_bswap_rBX_r11)
6634{
6635 IEMOP_MNEMONIC("bswap rBX/r9");
6636 IEMOP_HLP_MIN_486();
6637 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pIemCpu->uRexB);
6638}
6639
6640
/** Opcode 0x0f 0xcc. */
FNIEMOP_DEF(iemOp_bswap_rSP_r12)
{
    /* BSWAP rSP/r12 - REX.B selects r12. */
    IEMOP_MNEMONIC("bswap rSP/r12");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pIemCpu->uRexB);
}
6648
6649
/** Opcode 0x0f 0xcd. */
FNIEMOP_DEF(iemOp_bswap_rBP_r13)
{
    /* BSWAP rBP/r13 - REX.B selects r13. */
    IEMOP_MNEMONIC("bswap rBP/r13");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pIemCpu->uRexB);
}
6657
6658
/** Opcode 0x0f 0xce. */
FNIEMOP_DEF(iemOp_bswap_rSI_r14)
{
    /* BSWAP rSI/r14 - REX.B selects r14. */
    IEMOP_MNEMONIC("bswap rSI/r14");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pIemCpu->uRexB);
}
6666
6667
/** Opcode 0x0f 0xcf. */
FNIEMOP_DEF(iemOp_bswap_rDI_r15)
{
    /* BSWAP rDI/r15 - REX.B selects r15. */
    IEMOP_MNEMONIC("bswap rDI/r15");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pIemCpu->uRexB);
}
6675
6676
6677
/* Unimplemented MMX/SSE handlers for 0x0f 0xd0..0xd6 (stub bodies supplied
   by the FNIEMOP_STUB macro). */
/** Opcode 0x0f 0xd0. */
FNIEMOP_STUB(iemOp_addsubpd_Vpd_Wpd__addsubps_Vps_Wps);
/** Opcode 0x0f 0xd1. */
FNIEMOP_STUB(iemOp_psrlw_Pp_Qp__psrlw_Vdp_Wdq);
/** Opcode 0x0f 0xd2. */
FNIEMOP_STUB(iemOp_psrld_Pq_Qq__psrld_Vdq_Wdq);
/** Opcode 0x0f 0xd3. */
FNIEMOP_STUB(iemOp_psrlq_Pq_Qq__psrlq_Vdq_Wdq);
/** Opcode 0x0f 0xd4. */
FNIEMOP_STUB(iemOp_paddq_Pq_Qq__paddq_Vdq_Wdq);
/** Opcode 0x0f 0xd5. */
FNIEMOP_STUB(iemOp_pmulq_Pq_Qq__pmullw_Vdq_Wdq);
/** Opcode 0x0f 0xd6. */
FNIEMOP_STUB(iemOp_movq_Wq_Vq__movq2dq_Vdq_Nq__movdq2q_Pq_Uq);
6692
6693
6694/** Opcode 0x0f 0xd7. */
6695FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq__pmovmskb_Gd_Udq)
6696{
6697 /* Docs says register only. */
6698 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6699 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
6700 return IEMOP_RAISE_INVALID_OPCODE();
6701
6702 /* Note! Taking the lazy approch here wrt the high 32-bits of the GREG. */
6703 /** @todo testcase: Check that the instruction implicitly clears the high
6704 * bits in 64-bit mode. The REX.W is first necessary when VLMAX > 256
6705 * and opcode modifications are made to work with the whole width (not
6706 * just 128). */
6707 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
6708 {
6709 case IEM_OP_PRF_SIZE_OP: /* SSE */
6710 IEMOP_MNEMONIC("pmovmskb Gd,Nq");
6711 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
6712 IEM_MC_BEGIN(2, 0);
6713 IEM_MC_ARG(uint64_t *, pDst, 0);
6714 IEM_MC_ARG(uint128_t const *, pSrc, 1);
6715 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6716 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
6717 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
6718 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
6719 IEM_MC_ADVANCE_RIP();
6720 IEM_MC_END();
6721 return VINF_SUCCESS;
6722
6723 case 0: /* MMX */
6724 IEMOP_MNEMONIC("pmovmskb Gd,Udq");
6725 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
6726 IEM_MC_BEGIN(2, 0);
6727 IEM_MC_ARG(uint64_t *, pDst, 0);
6728 IEM_MC_ARG(uint64_t const *, pSrc, 1);
6729 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
6730 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
6731 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
6732 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
6733 IEM_MC_ADVANCE_RIP();
6734 IEM_MC_END();
6735 return VINF_SUCCESS;
6736
6737 default:
6738 return IEMOP_RAISE_INVALID_OPCODE();
6739 }
6740}
6741
6742
/* Unimplemented MMX/SSE handlers for 0x0f 0xd8..0xee (stub bodies supplied
   by the FNIEMOP_STUB macro). */
/** Opcode 0x0f 0xd8. */
FNIEMOP_STUB(iemOp_psubusb_Pq_Qq__psubusb_Vdq_Wdq);
/** Opcode 0x0f 0xd9. */
FNIEMOP_STUB(iemOp_psubusw_Pq_Qq__psubusw_Vdq_Wdq);
/** Opcode 0x0f 0xda. */
FNIEMOP_STUB(iemOp_pminub_Pq_Qq__pminub_Vdq_Wdq);
/** Opcode 0x0f 0xdb. */
FNIEMOP_STUB(iemOp_pand_Pq_Qq__pand_Vdq_Wdq);
/** Opcode 0x0f 0xdc. */
FNIEMOP_STUB(iemOp_paddusb_Pq_Qq__paddusb_Vdq_Wdq);
/** Opcode 0x0f 0xdd. */
FNIEMOP_STUB(iemOp_paddusw_Pq_Qq__paddusw_Vdq_Wdq);
/** Opcode 0x0f 0xde. */
FNIEMOP_STUB(iemOp_pmaxub_Pq_Qq__pamxub_Vdq_Wdq);
/** Opcode 0x0f 0xdf. */
FNIEMOP_STUB(iemOp_pandn_Pq_Qq__pandn_Vdq_Wdq);
/** Opcode 0x0f 0xe0. */
FNIEMOP_STUB(iemOp_pavgb_Pq_Qq__pavgb_Vdq_Wdq);
/** Opcode 0x0f 0xe1. */
FNIEMOP_STUB(iemOp_psraw_Pq_Qq__psraw_Vdq_Wdq);
/** Opcode 0x0f 0xe2. */
FNIEMOP_STUB(iemOp_psrad_Pq_Qq__psrad_Vdq_Wdq);
/** Opcode 0x0f 0xe3. */
FNIEMOP_STUB(iemOp_pavgw_Pq_Qq__pavgw_Vdq_Wdq);
/** Opcode 0x0f 0xe4. */
FNIEMOP_STUB(iemOp_pmulhuw_Pq_Qq__pmulhuw_Vdq_Wdq);
/** Opcode 0x0f 0xe5. */
FNIEMOP_STUB(iemOp_pmulhw_Pq_Qq__pmulhw_Vdq_Wdq);
/** Opcode 0x0f 0xe6. */
FNIEMOP_STUB(iemOp_cvttpd2dq_Vdq_Wdp__cvtdq2pd_Vdq_Wpd__cvtpd2dq_Vdq_Wpd);
/** Opcode 0x0f 0xe7. */
FNIEMOP_STUB(iemOp_movntq_Mq_Pq__movntdq_Mdq_Vdq);
/** Opcode 0x0f 0xe8. */
FNIEMOP_STUB(iemOp_psubsb_Pq_Qq__psubsb_Vdq_Wdq);
/** Opcode 0x0f 0xe9. */
FNIEMOP_STUB(iemOp_psubsw_Pq_Qq__psubsw_Vdq_Wdq);
/** Opcode 0x0f 0xea. */
FNIEMOP_STUB(iemOp_pminsw_Pq_Qq__pminsw_Vdq_Wdq);
/** Opcode 0x0f 0xeb. */
FNIEMOP_STUB(iemOp_por_Pq_Qq__por_Vdq_Wdq);
/** Opcode 0x0f 0xec. */
FNIEMOP_STUB(iemOp_paddsb_Pq_Qq__paddsb_Vdq_Wdq);
/** Opcode 0x0f 0xed. */
FNIEMOP_STUB(iemOp_paddsw_Pq_Qq__paddsw_Vdq_Wdq);
/** Opcode 0x0f 0xee. */
FNIEMOP_STUB(iemOp_pmaxsw_Pq_Qq__pmaxsw_Vdq_Wdq);
6789
6790
/** Opcode 0x0f 0xef. */
FNIEMOP_DEF(iemOp_pxor_Pq_Qq__pxor_Vdq_Wdq)
{
    /* PXOR - delegate to the common MMX/SSE2 full-register binary-op worker
       with the PXOR implementation table. */
    IEMOP_MNEMONIC("pxor");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pxor);
}
6797
6798
/* Unimplemented MMX/SSE handlers for 0x0f 0xf0..0xfe (stub bodies supplied
   by the FNIEMOP_STUB macro). */
/** Opcode 0x0f 0xf0. */
FNIEMOP_STUB(iemOp_lddqu_Vdq_Mdq);
/** Opcode 0x0f 0xf1. */
FNIEMOP_STUB(iemOp_psllw_Pq_Qq__pslw_Vdq_Wdq);
/** Opcode 0x0f 0xf2. */
FNIEMOP_STUB(iemOp_psld_Pq_Qq__pslld_Vdq_Wdq);
/** Opcode 0x0f 0xf3. */
FNIEMOP_STUB(iemOp_psllq_Pq_Qq__pslq_Vdq_Wdq);
/** Opcode 0x0f 0xf4. */
FNIEMOP_STUB(iemOp_pmuludq_Pq_Qq__pmuludq_Vdq_Wdq);
/** Opcode 0x0f 0xf5. */
FNIEMOP_STUB(iemOp_pmaddwd_Pq_Qq__pmaddwd_Vdq_Wdq);
/** Opcode 0x0f 0xf6. */
FNIEMOP_STUB(iemOp_psadbw_Pq_Qq__psadbw_Vdq_Wdq);
/** Opcode 0x0f 0xf7. */
FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq__maskmovdqu_Vdq_Udq);
/** Opcode 0x0f 0xf8. */
FNIEMOP_STUB(iemOp_psubb_Pq_Qq_psubb_Vdq_Wdq); //NEXT
/** Opcode 0x0f 0xf9. */
FNIEMOP_STUB(iemOp_psubw_Pq_Qq__psubw_Vdq_Wdq);
/** Opcode 0x0f 0xfa. */
FNIEMOP_STUB(iemOp_psubd_Pq_Qq__psubd_Vdq_Wdq);
/** Opcode 0x0f 0xfb. */
FNIEMOP_STUB(iemOp_psubq_Pq_Qq__psbuq_Vdq_Wdq);
/** Opcode 0x0f 0xfc. */
FNIEMOP_STUB(iemOp_paddb_Pq_Qq__paddb_Vdq_Wdq);
/** Opcode 0x0f 0xfd. */
FNIEMOP_STUB(iemOp_paddw_Pq_Qq__paddw_Vdq_Wdq);
/** Opcode 0x0f 0xfe. */
FNIEMOP_STUB(iemOp_paddd_Pq_Qq__paddd_Vdq_Wdq);
6829
6830
6831const PFNIEMOP g_apfnTwoByteMap[256] =
6832{
6833 /* 0x00 */ iemOp_Grp6,
6834 /* 0x01 */ iemOp_Grp7,
6835 /* 0x02 */ iemOp_lar_Gv_Ew,
6836 /* 0x03 */ iemOp_lsl_Gv_Ew,
6837 /* 0x04 */ iemOp_Invalid,
6838 /* 0x05 */ iemOp_syscall,
6839 /* 0x06 */ iemOp_clts,
6840 /* 0x07 */ iemOp_sysret,
6841 /* 0x08 */ iemOp_invd,
6842 /* 0x09 */ iemOp_wbinvd,
6843 /* 0x0a */ iemOp_Invalid,
6844 /* 0x0b */ iemOp_ud2,
6845 /* 0x0c */ iemOp_Invalid,
6846 /* 0x0d */ iemOp_nop_Ev_GrpP,
6847 /* 0x0e */ iemOp_femms,
6848 /* 0x0f */ iemOp_3Dnow,
6849 /* 0x10 */ iemOp_movups_Vps_Wps__movupd_Vpd_Wpd__movss_Vss_Wss__movsd_Vsd_Wsd,
6850 /* 0x11 */ iemOp_movups_Wps_Vps__movupd_Wpd_Vpd__movss_Wss_Vss__movsd_Vsd_Wsd,
6851 /* 0x12 */ iemOp_movlps_Vq_Mq__movhlps_Vq_Uq__movlpd_Vq_Mq__movsldup_Vq_Wq__movddup_Vq_Wq,
6852 /* 0x13 */ iemOp_movlps_Mq_Vq__movlpd_Mq_Vq,
6853 /* 0x14 */ iemOp_unpckhlps_Vps_Wq__unpcklpd_Vpd_Wq,
6854 /* 0x15 */ iemOp_unpckhps_Vps_Wq__unpckhpd_Vpd_Wq,
6855 /* 0x16 */ iemOp_movhps_Vq_Mq__movlhps_Vq_Uq__movhpd_Vq_Mq__movshdup_Vq_Wq,
6856 /* 0x17 */ iemOp_movhps_Mq_Vq__movhpd_Mq_Vq,
6857 /* 0x18 */ iemOp_prefetch_Grp16,
6858 /* 0x19 */ iemOp_nop_Ev,
6859 /* 0x1a */ iemOp_nop_Ev,
6860 /* 0x1b */ iemOp_nop_Ev,
6861 /* 0x1c */ iemOp_nop_Ev,
6862 /* 0x1d */ iemOp_nop_Ev,
6863 /* 0x1e */ iemOp_nop_Ev,
6864 /* 0x1f */ iemOp_nop_Ev,
6865 /* 0x20 */ iemOp_mov_Rd_Cd,
6866 /* 0x21 */ iemOp_mov_Rd_Dd,
6867 /* 0x22 */ iemOp_mov_Cd_Rd,
6868 /* 0x23 */ iemOp_mov_Dd_Rd,
6869 /* 0x24 */ iemOp_mov_Rd_Td,
6870 /* 0x25 */ iemOp_Invalid,
6871 /* 0x26 */ iemOp_mov_Td_Rd,
6872 /* 0x27 */ iemOp_Invalid,
6873 /* 0x28 */ iemOp_movaps_Vps_Wps__movapd_Vpd_Wpd,
6874 /* 0x29 */ iemOp_movaps_Wps_Vps__movapd_Wpd_Vpd,
6875 /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi__cvtpi2pd_Vpd_Qpi__cvtsi2ss_Vss_Ey__cvtsi2sd_Vsd_Ey,
6876 /* 0x2b */ iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd,
6877 /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps__cvttpd2pi_Ppi_Wpd__cvttss2si_Gy_Wss__cvttsd2si_Yu_Wsd,
6878 /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps__cvtpd2pi_QpiWpd__cvtss2si_Gy_Wss__cvtsd2si_Gy_Wsd,
6879 /* 0x2e */ iemOp_ucomiss_Vss_Wss__ucomisd_Vsd_Wsd,
6880 /* 0x2f */ iemOp_comiss_Vss_Wss__comisd_Vsd_Wsd,
6881 /* 0x30 */ iemOp_wrmsr,
6882 /* 0x31 */ iemOp_rdtsc,
6883 /* 0x32 */ iemOp_rdmsr,
6884 /* 0x33 */ iemOp_rdpmc,
6885 /* 0x34 */ iemOp_sysenter,
6886 /* 0x35 */ iemOp_sysexit,
6887 /* 0x36 */ iemOp_Invalid,
6888 /* 0x37 */ iemOp_getsec,
6889 /* 0x38 */ iemOp_3byte_Esc_A4,
6890 /* 0x39 */ iemOp_Invalid,
6891 /* 0x3a */ iemOp_3byte_Esc_A5,
6892 /* 0x3b */ iemOp_Invalid,
6893 /* 0x3c */ iemOp_Invalid,
6894 /* 0x3d */ iemOp_Invalid,
6895 /* 0x3e */ iemOp_Invalid,
6896 /* 0x3f */ iemOp_Invalid,
6897 /* 0x40 */ iemOp_cmovo_Gv_Ev,
6898 /* 0x41 */ iemOp_cmovno_Gv_Ev,
6899 /* 0x42 */ iemOp_cmovc_Gv_Ev,
6900 /* 0x43 */ iemOp_cmovnc_Gv_Ev,
6901 /* 0x44 */ iemOp_cmove_Gv_Ev,
6902 /* 0x45 */ iemOp_cmovne_Gv_Ev,
6903 /* 0x46 */ iemOp_cmovbe_Gv_Ev,
6904 /* 0x47 */ iemOp_cmovnbe_Gv_Ev,
6905 /* 0x48 */ iemOp_cmovs_Gv_Ev,
6906 /* 0x49 */ iemOp_cmovns_Gv_Ev,
6907 /* 0x4a */ iemOp_cmovp_Gv_Ev,
6908 /* 0x4b */ iemOp_cmovnp_Gv_Ev,
6909 /* 0x4c */ iemOp_cmovl_Gv_Ev,
6910 /* 0x4d */ iemOp_cmovnl_Gv_Ev,
6911 /* 0x4e */ iemOp_cmovle_Gv_Ev,
6912 /* 0x4f */ iemOp_cmovnle_Gv_Ev,
6913 /* 0x50 */ iemOp_movmskps_Gy_Ups__movmskpd_Gy_Upd,
6914 /* 0x51 */ iemOp_sqrtps_Wps_Vps__sqrtpd_Wpd_Vpd__sqrtss_Vss_Wss__sqrtsd_Vsd_Wsd,
6915 /* 0x52 */ iemOp_rsqrtps_Wps_Vps__rsqrtss_Vss_Wss,
6916 /* 0x53 */ iemOp_rcpps_Wps_Vps__rcpss_Vs_Wss,
6917 /* 0x54 */ iemOp_andps_Vps_Wps__andpd_Wpd_Vpd,
6918 /* 0x55 */ iemOp_andnps_Vps_Wps__andnpd_Wpd_Vpd,
6919 /* 0x56 */ iemOp_orps_Wpd_Vpd__orpd_Wpd_Vpd,
6920 /* 0x57 */ iemOp_xorps_Vps_Wps__xorpd_Wpd_Vpd,
6921 /* 0x58 */ iemOp_addps_Vps_Wps__addpd_Vpd_Wpd__addss_Vss_Wss__addsd_Vsd_Wsd,
6922 /* 0x59 */ iemOp_mulps_Vps_Wps__mulpd_Vpd_Wpd__mulss_Vss__Wss__mulsd_Vsd_Wsd,
6923 /* 0x5a */ iemOp_cvtps2pd_Vpd_Wps__cvtpd2ps_Vps_Wpd__cvtss2sd_Vsd_Wss__cvtsd2ss_Vss_Wsd,
6924 /* 0x5b */ iemOp_cvtdq2ps_Vps_Wdq__cvtps2dq_Vdq_Wps__cvtps2dq_Vdq_Wps,
6925 /* 0x5c */ iemOp_subps_Vps_Wps__subpd_Vps_Wdp__subss_Vss_Wss__subsd_Vsd_Wsd,
6926 /* 0x5d */ iemOp_minps_Vps_Wps__minpd_Vpd_Wpd__minss_Vss_Wss__minsd_Vsd_Wsd,
6927 /* 0x5e */ iemOp_divps_Vps_Wps__divpd_Vpd_Wpd__divss_Vss_Wss__divsd_Vsd_Wsd,
6928 /* 0x5f */ iemOp_maxps_Vps_Wps__maxpd_Vpd_Wpd__maxss_Vss_Wss__maxsd_Vsd_Wsd,
6929 /* 0x60 */ iemOp_punpcklbw_Pq_Qd__punpcklbw_Vdq_Wdq,
6930 /* 0x61 */ iemOp_punpcklwd_Pq_Qd__punpcklwd_Vdq_Wdq,
6931 /* 0x62 */ iemOp_punpckldq_Pq_Qd__punpckldq_Vdq_Wdq,
6932 /* 0x63 */ iemOp_packsswb_Pq_Qq__packsswb_Vdq_Wdq,
6933 /* 0x64 */ iemOp_pcmpgtb_Pq_Qq__pcmpgtb_Vdq_Wdq,
6934 /* 0x65 */ iemOp_pcmpgtw_Pq_Qq__pcmpgtw_Vdq_Wdq,
6935 /* 0x66 */ iemOp_pcmpgtd_Pq_Qq__pcmpgtd_Vdq_Wdq,
6936 /* 0x67 */ iemOp_packuswb_Pq_Qq__packuswb_Vdq_Wdq,
6937 /* 0x68 */ iemOp_punpckhbw_Pq_Qq__punpckhbw_Vdq_Wdq,
6938 /* 0x69 */ iemOp_punpckhwd_Pq_Qd__punpckhwd_Vdq_Wdq,
6939 /* 0x6a */ iemOp_punpckhdq_Pq_Qd__punpckhdq_Vdq_Wdq,
6940 /* 0x6b */ iemOp_packssdw_Pq_Qd__packssdq_Vdq_Wdq,
6941 /* 0x6c */ iemOp_punpcklqdq_Vdq_Wdq,
6942 /* 0x6d */ iemOp_punpckhqdq_Vdq_Wdq,
6943 /* 0x6e */ iemOp_movd_q_Pd_Ey__movd_q_Vy_Ey,
6944 /* 0x6f */ iemOp_movq_Pq_Qq__movdqa_Vdq_Wdq__movdqu_Vdq_Wdq,
6945 /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib__pshufd_Vdq_Wdq_Ib__pshufhw_Vdq_Wdq_Ib__pshuflq_Vdq_Wdq_Ib,
6946 /* 0x71 */ iemOp_Grp12,
6947 /* 0x72 */ iemOp_Grp13,
6948 /* 0x73 */ iemOp_Grp14,
6949 /* 0x74 */ iemOp_pcmpeqb_Pq_Qq__pcmpeqb_Vdq_Wdq,
6950 /* 0x75 */ iemOp_pcmpeqw_Pq_Qq__pcmpeqw_Vdq_Wdq,
6951 /* 0x76 */ iemOp_pcmped_Pq_Qq__pcmpeqd_Vdq_Wdq,
6952 /* 0x77 */ iemOp_emms,
6953 /* 0x78 */ iemOp_vmread_AmdGrp17,
6954 /* 0x79 */ iemOp_vmwrite,
6955 /* 0x7a */ iemOp_Invalid,
6956 /* 0x7b */ iemOp_Invalid,
6957 /* 0x7c */ iemOp_haddpd_Vdp_Wpd__haddps_Vps_Wps,
6958 /* 0x7d */ iemOp_hsubpd_Vpd_Wpd__hsubps_Vps_Wps,
6959 /* 0x7e */ iemOp_movd_q_Ey_Pd__movd_q_Ey_Vy__movq_Vq_Wq,
6960 /* 0x7f */ iemOp_movq_Qq_Pq__movq_movdqa_Wdq_Vdq__movdqu_Wdq_Vdq,
6961 /* 0x80 */ iemOp_jo_Jv,
6962 /* 0x81 */ iemOp_jno_Jv,
6963 /* 0x82 */ iemOp_jc_Jv,
6964 /* 0x83 */ iemOp_jnc_Jv,
6965 /* 0x84 */ iemOp_je_Jv,
6966 /* 0x85 */ iemOp_jne_Jv,
6967 /* 0x86 */ iemOp_jbe_Jv,
6968 /* 0x87 */ iemOp_jnbe_Jv,
6969 /* 0x88 */ iemOp_js_Jv,
6970 /* 0x89 */ iemOp_jns_Jv,
6971 /* 0x8a */ iemOp_jp_Jv,
6972 /* 0x8b */ iemOp_jnp_Jv,
6973 /* 0x8c */ iemOp_jl_Jv,
6974 /* 0x8d */ iemOp_jnl_Jv,
6975 /* 0x8e */ iemOp_jle_Jv,
6976 /* 0x8f */ iemOp_jnle_Jv,
6977 /* 0x90 */ iemOp_seto_Eb,
6978 /* 0x91 */ iemOp_setno_Eb,
6979 /* 0x92 */ iemOp_setc_Eb,
6980 /* 0x93 */ iemOp_setnc_Eb,
6981 /* 0x94 */ iemOp_sete_Eb,
6982 /* 0x95 */ iemOp_setne_Eb,
6983 /* 0x96 */ iemOp_setbe_Eb,
6984 /* 0x97 */ iemOp_setnbe_Eb,
6985 /* 0x98 */ iemOp_sets_Eb,
6986 /* 0x99 */ iemOp_setns_Eb,
6987 /* 0x9a */ iemOp_setp_Eb,
6988 /* 0x9b */ iemOp_setnp_Eb,
6989 /* 0x9c */ iemOp_setl_Eb,
6990 /* 0x9d */ iemOp_setnl_Eb,
6991 /* 0x9e */ iemOp_setle_Eb,
6992 /* 0x9f */ iemOp_setnle_Eb,
6993 /* 0xa0 */ iemOp_push_fs,
6994 /* 0xa1 */ iemOp_pop_fs,
6995 /* 0xa2 */ iemOp_cpuid,
6996 /* 0xa3 */ iemOp_bt_Ev_Gv,
6997 /* 0xa4 */ iemOp_shld_Ev_Gv_Ib,
6998 /* 0xa5 */ iemOp_shld_Ev_Gv_CL,
6999 /* 0xa6 */ iemOp_Invalid,
7000 /* 0xa7 */ iemOp_Invalid,
7001 /* 0xa8 */ iemOp_push_gs,
7002 /* 0xa9 */ iemOp_pop_gs,
7003 /* 0xaa */ iemOp_rsm,
7004 /* 0xab */ iemOp_bts_Ev_Gv,
7005 /* 0xac */ iemOp_shrd_Ev_Gv_Ib,
7006 /* 0xad */ iemOp_shrd_Ev_Gv_CL,
7007 /* 0xae */ iemOp_Grp15,
7008 /* 0xaf */ iemOp_imul_Gv_Ev,
7009 /* 0xb0 */ iemOp_cmpxchg_Eb_Gb,
7010 /* 0xb1 */ iemOp_cmpxchg_Ev_Gv,
7011 /* 0xb2 */ iemOp_lss_Gv_Mp,
7012 /* 0xb3 */ iemOp_btr_Ev_Gv,
7013 /* 0xb4 */ iemOp_lfs_Gv_Mp,
7014 /* 0xb5 */ iemOp_lgs_Gv_Mp,
7015 /* 0xb6 */ iemOp_movzx_Gv_Eb,
7016 /* 0xb7 */ iemOp_movzx_Gv_Ew,
7017 /* 0xb8 */ iemOp_popcnt_Gv_Ev_jmpe,
7018 /* 0xb9 */ iemOp_Grp10,
7019 /* 0xba */ iemOp_Grp8,
7020 /* 0xbd */ iemOp_btc_Ev_Gv,
7021 /* 0xbc */ iemOp_bsf_Gv_Ev,
7022 /* 0xbd */ iemOp_bsr_Gv_Ev,
7023 /* 0xbe */ iemOp_movsx_Gv_Eb,
7024 /* 0xbf */ iemOp_movsx_Gv_Ew,
7025 /* 0xc0 */ iemOp_xadd_Eb_Gb,
7026 /* 0xc1 */ iemOp_xadd_Ev_Gv,
7027 /* 0xc2 */ iemOp_cmpps_Vps_Wps_Ib__cmppd_Vpd_Wpd_Ib__cmpss_Vss_Wss_Ib__cmpsd_Vsd_Wsd_Ib,
7028 /* 0xc3 */ iemOp_movnti_My_Gy,
7029 /* 0xc4 */ iemOp_pinsrw_Pq_Ry_Mw_Ib__pinsrw_Vdq_Ry_Mw_Ib,
7030 /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib__pextrw_Gd_Udq_Ib,
7031 /* 0xc6 */ iemOp_shufps_Vps_Wps_Ib__shufdp_Vpd_Wpd_Ib,
7032 /* 0xc7 */ iemOp_Grp9,
7033 /* 0xc8 */ iemOp_bswap_rAX_r8,
7034 /* 0xc9 */ iemOp_bswap_rCX_r9,
7035 /* 0xca */ iemOp_bswap_rDX_r10,
7036 /* 0xcb */ iemOp_bswap_rBX_r11,
7037 /* 0xcc */ iemOp_bswap_rSP_r12,
7038 /* 0xcd */ iemOp_bswap_rBP_r13,
7039 /* 0xce */ iemOp_bswap_rSI_r14,
7040 /* 0xcf */ iemOp_bswap_rDI_r15,
7041 /* 0xd0 */ iemOp_addsubpd_Vpd_Wpd__addsubps_Vps_Wps,
7042 /* 0xd1 */ iemOp_psrlw_Pp_Qp__psrlw_Vdp_Wdq,
7043 /* 0xd2 */ iemOp_psrld_Pq_Qq__psrld_Vdq_Wdq,
7044 /* 0xd3 */ iemOp_psrlq_Pq_Qq__psrlq_Vdq_Wdq,
7045 /* 0xd4 */ iemOp_paddq_Pq_Qq__paddq_Vdq_Wdq,
7046 /* 0xd5 */ iemOp_pmulq_Pq_Qq__pmullw_Vdq_Wdq,
7047 /* 0xd6 */ iemOp_movq_Wq_Vq__movq2dq_Vdq_Nq__movdq2q_Pq_Uq,
7048 /* 0xd7 */ iemOp_pmovmskb_Gd_Nq__pmovmskb_Gd_Udq,
7049 /* 0xd8 */ iemOp_psubusb_Pq_Qq__psubusb_Vdq_Wdq,
7050 /* 0xd9 */ iemOp_psubusw_Pq_Qq__psubusw_Vdq_Wdq,
7051 /* 0xda */ iemOp_pminub_Pq_Qq__pminub_Vdq_Wdq,
7052 /* 0xdb */ iemOp_pand_Pq_Qq__pand_Vdq_Wdq,
7053 /* 0xdc */ iemOp_paddusb_Pq_Qq__paddusb_Vdq_Wdq,
7054 /* 0xdd */ iemOp_paddusw_Pq_Qq__paddusw_Vdq_Wdq,
7055 /* 0xde */ iemOp_pmaxub_Pq_Qq__pamxub_Vdq_Wdq,
7056 /* 0xdf */ iemOp_pandn_Pq_Qq__pandn_Vdq_Wdq,
7057 /* 0xe0 */ iemOp_pavgb_Pq_Qq__pavgb_Vdq_Wdq,
7058 /* 0xe1 */ iemOp_psraw_Pq_Qq__psraw_Vdq_Wdq,
7059 /* 0xe2 */ iemOp_psrad_Pq_Qq__psrad_Vdq_Wdq,
7060 /* 0xe3 */ iemOp_pavgw_Pq_Qq__pavgw_Vdq_Wdq,
7061 /* 0xe4 */ iemOp_pmulhuw_Pq_Qq__pmulhuw_Vdq_Wdq,
7062 /* 0xe5 */ iemOp_pmulhw_Pq_Qq__pmulhw_Vdq_Wdq,
7063 /* 0xe6 */ iemOp_cvttpd2dq_Vdq_Wdp__cvtdq2pd_Vdq_Wpd__cvtpd2dq_Vdq_Wpd,
7064 /* 0xe7 */ iemOp_movntq_Mq_Pq__movntdq_Mdq_Vdq,
7065 /* 0xe8 */ iemOp_psubsb_Pq_Qq__psubsb_Vdq_Wdq,
7066 /* 0xe9 */ iemOp_psubsw_Pq_Qq__psubsw_Vdq_Wdq,
7067 /* 0xea */ iemOp_pminsw_Pq_Qq__pminsw_Vdq_Wdq,
7068 /* 0xeb */ iemOp_por_Pq_Qq__por_Vdq_Wdq,
7069 /* 0xec */ iemOp_paddsb_Pq_Qq__paddsb_Vdq_Wdq,
7070 /* 0xed */ iemOp_paddsw_Pq_Qq__paddsw_Vdq_Wdq,
7071 /* 0xee */ iemOp_pmaxsw_Pq_Qq__pmaxsw_Vdq_Wdq,
7072 /* 0xef */ iemOp_pxor_Pq_Qq__pxor_Vdq_Wdq,
7073 /* 0xf0 */ iemOp_lddqu_Vdq_Mdq,
7074 /* 0xf1 */ iemOp_psllw_Pq_Qq__pslw_Vdq_Wdq,
7075 /* 0xf2 */ iemOp_psld_Pq_Qq__pslld_Vdq_Wdq,
7076 /* 0xf3 */ iemOp_psllq_Pq_Qq__pslq_Vdq_Wdq,
7077 /* 0xf4 */ iemOp_pmuludq_Pq_Qq__pmuludq_Vdq_Wdq,
7078 /* 0xf5 */ iemOp_pmaddwd_Pq_Qq__pmaddwd_Vdq_Wdq,
7079 /* 0xf6 */ iemOp_psadbw_Pq_Qq__psadbw_Vdq_Wdq,
7080 /* 0xf7 */ iemOp_maskmovq_Pq_Nq__maskmovdqu_Vdq_Udq,
7081 /* 0xf8 */ iemOp_psubb_Pq_Qq_psubb_Vdq_Wdq,
7082 /* 0xf9 */ iemOp_psubw_Pq_Qq__psubw_Vdq_Wdq,
7083 /* 0xfa */ iemOp_psubd_Pq_Qq__psubd_Vdq_Wdq,
7084 /* 0xfb */ iemOp_psubq_Pq_Qq__psbuq_Vdq_Wdq,
7085 /* 0xfc */ iemOp_paddb_Pq_Qq__paddb_Vdq_Wdq,
7086 /* 0xfd */ iemOp_paddw_Pq_Qq__paddw_Vdq_Wdq,
7087 /* 0xfe */ iemOp_paddd_Pq_Qq__paddd_Vdq_Wdq,
7088 /* 0xff */ iemOp_Invalid
7089};
7090
7091/** @} */
7092
7093
7094/** @name One byte opcodes.
7095 *
7096 * @{
7097 */
7098
/** Opcode 0x00 - ADD Eb,Gb (add byte register into r/m8). */
FNIEMOP_DEF(iemOp_add_Eb_Gb)
{
    IEMOP_MNEMONIC("add Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_add);
}
7105
7106
/** Opcode 0x01 - ADD Ev,Gv (add word/dword/qword register into r/m). */
FNIEMOP_DEF(iemOp_add_Ev_Gv)
{
    IEMOP_MNEMONIC("add Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_add);
}
7113
7114
/** Opcode 0x02 - ADD Gb,Eb (add r/m8 into byte register). */
FNIEMOP_DEF(iemOp_add_Gb_Eb)
{
    IEMOP_MNEMONIC("add Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_add);
}
7121
7122
/** Opcode 0x03 - ADD Gv,Ev (add r/m into word/dword/qword register). */
FNIEMOP_DEF(iemOp_add_Gv_Ev)
{
    IEMOP_MNEMONIC("add Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_add);
}
7129
7130
/** Opcode 0x04 - ADD AL,Ib (add immediate byte into AL). */
FNIEMOP_DEF(iemOp_add_Al_Ib)
{
    IEMOP_MNEMONIC("add al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_add);
}
7137
7138
/** Opcode 0x05 - ADD rAX,Iz (add immediate word/dword into rAX). */
FNIEMOP_DEF(iemOp_add_eAX_Iz)
{
    IEMOP_MNEMONIC("add rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_add);
}
7145
7146
/** Opcode 0x06 - PUSH ES. */
FNIEMOP_DEF(iemOp_push_ES)
{
    IEMOP_MNEMONIC("push es");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_ES);
}
7153
7154
/** Opcode 0x07 - POP ES. */
FNIEMOP_DEF(iemOp_pop_ES)
{
    IEMOP_MNEMONIC("pop es");
    IEMOP_HLP_NO_64BIT();       /* POP ES is invalid in 64-bit mode. */
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* NOTE(review): pop_SS/pop_DS do the lock-prefix check before the 64-bit
       check; both orders raise #UD, but consider making the three uniform. */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_ES, pIemCpu->enmEffOpSize);
}
7163
7164
/** Opcode 0x08 - OR Eb,Gb. */
FNIEMOP_DEF(iemOp_or_Eb_Gb)
{
    IEMOP_MNEMONIC("or Eb,Gb");
    /* AF is undefined after OR; exclude it from verification compares. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_or);
}
7172
7173
7174/** Opcode 0x09. */
7175FNIEMOP_DEF(iemOp_or_Ev_Gv)
7176{
7177 IEMOP_MNEMONIC("or Ev,Gv ");
7178 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7179 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_or);
7180}
7181
7182
/** Opcode 0x0a - OR Gb,Eb. */
FNIEMOP_DEF(iemOp_or_Gb_Eb)
{
    IEMOP_MNEMONIC("or Gb,Eb");
    /* AF is undefined after OR; exclude it from verification compares. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_or);
}
7190
7191
/** Opcode 0x0b - OR Gv,Ev. */
FNIEMOP_DEF(iemOp_or_Gv_Ev)
{
    IEMOP_MNEMONIC("or Gv,Ev");
    /* AF is undefined after OR; exclude it from verification compares. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_or);
}
7199
7200
/** Opcode 0x0c - OR AL,Ib. */
FNIEMOP_DEF(iemOp_or_Al_Ib)
{
    IEMOP_MNEMONIC("or al,Ib");
    /* AF is undefined after OR; exclude it from verification compares. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_or);
}
7208
7209
/** Opcode 0x0d - OR rAX,Iz. */
FNIEMOP_DEF(iemOp_or_eAX_Iz)
{
    IEMOP_MNEMONIC("or rAX,Iz");
    /* AF is undefined after OR; exclude it from verification compares. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_or);
}
7217
7218
/** Opcode 0x0e - PUSH CS. */
FNIEMOP_DEF(iemOp_push_CS)
{
    IEMOP_MNEMONIC("push cs");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_CS);
}
7225
7226
/** Opcode 0x0f - escape into the two-byte opcode map (0x0f xx). */
FNIEMOP_DEF(iemOp_2byteEscape)
{
    /* Fetch the second opcode byte and dispatch via the two-byte map. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    /** @todo PUSH CS on 8086, undefined on 80186. */
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL(g_apfnTwoByteMap[b]);
}
7235
/** Opcode 0x10 - ADC Eb,Gb (add with carry). */
FNIEMOP_DEF(iemOp_adc_Eb_Gb)
{
    IEMOP_MNEMONIC("adc Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_adc);
}
7242
7243
/** Opcode 0x11 - ADC Ev,Gv (add with carry). */
FNIEMOP_DEF(iemOp_adc_Ev_Gv)
{
    IEMOP_MNEMONIC("adc Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_adc);
}
7250
7251
/** Opcode 0x12 - ADC Gb,Eb (add with carry). */
FNIEMOP_DEF(iemOp_adc_Gb_Eb)
{
    IEMOP_MNEMONIC("adc Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_adc);
}
7258
7259
/** Opcode 0x13 - ADC Gv,Ev (add with carry). */
FNIEMOP_DEF(iemOp_adc_Gv_Ev)
{
    IEMOP_MNEMONIC("adc Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_adc);
}
7266
7267
/** Opcode 0x14 - ADC AL,Ib (add with carry). */
FNIEMOP_DEF(iemOp_adc_Al_Ib)
{
    IEMOP_MNEMONIC("adc al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_adc);
}
7274
7275
/** Opcode 0x15 - ADC rAX,Iz (add with carry). */
FNIEMOP_DEF(iemOp_adc_eAX_Iz)
{
    IEMOP_MNEMONIC("adc rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_adc);
}
7282
7283
/** Opcode 0x16 - PUSH SS. */
FNIEMOP_DEF(iemOp_push_SS)
{
    IEMOP_MNEMONIC("push ss");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_SS);
}
7290
7291
/** Opcode 0x17 - POP SS. */
FNIEMOP_DEF(iemOp_pop_SS)
{
    IEMOP_MNEMONIC("pop ss"); /** @todo implies instruction fusing? */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();       /* POP SS is invalid in 64-bit mode. */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_SS, pIemCpu->enmEffOpSize);
}
7300
7301
/** Opcode 0x18 - SBB Eb,Gb (subtract with borrow). */
FNIEMOP_DEF(iemOp_sbb_Eb_Gb)
{
    IEMOP_MNEMONIC("sbb Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sbb);
}
7308
7309
/** Opcode 0x19 - SBB Ev,Gv (subtract with borrow). */
FNIEMOP_DEF(iemOp_sbb_Ev_Gv)
{
    IEMOP_MNEMONIC("sbb Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sbb);
}
7316
7317
/** Opcode 0x1a - SBB Gb,Eb (subtract with borrow). */
FNIEMOP_DEF(iemOp_sbb_Gb_Eb)
{
    IEMOP_MNEMONIC("sbb Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sbb);
}
7324
7325
/** Opcode 0x1b - SBB Gv,Ev (subtract with borrow). */
FNIEMOP_DEF(iemOp_sbb_Gv_Ev)
{
    IEMOP_MNEMONIC("sbb Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sbb);
}
7332
7333
/** Opcode 0x1c - SBB AL,Ib (subtract with borrow). */
FNIEMOP_DEF(iemOp_sbb_Al_Ib)
{
    IEMOP_MNEMONIC("sbb al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sbb);
}
7340
7341
/** Opcode 0x1d - SBB rAX,Iz (subtract with borrow). */
FNIEMOP_DEF(iemOp_sbb_eAX_Iz)
{
    IEMOP_MNEMONIC("sbb rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sbb);
}
7348
7349
/** Opcode 0x1e - PUSH DS. */
FNIEMOP_DEF(iemOp_push_DS)
{
    IEMOP_MNEMONIC("push ds");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_DS);
}
7356
7357
/** Opcode 0x1f - POP DS. */
FNIEMOP_DEF(iemOp_pop_DS)
{
    IEMOP_MNEMONIC("pop ds");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();       /* POP DS is invalid in 64-bit mode. */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_DS, pIemCpu->enmEffOpSize);
}
7366
7367
/** Opcode 0x20 - AND Eb,Gb. */
FNIEMOP_DEF(iemOp_and_Eb_Gb)
{
    IEMOP_MNEMONIC("and Eb,Gb");
    /* AF is undefined after AND; exclude it from verification compares. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_and);
}
7375
7376
/** Opcode 0x21 - AND Ev,Gv. */
FNIEMOP_DEF(iemOp_and_Ev_Gv)
{
    IEMOP_MNEMONIC("and Ev,Gv");
    /* AF is undefined after AND; exclude it from verification compares. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_and);
}
7384
7385
/** Opcode 0x22 - AND Gb,Eb. */
FNIEMOP_DEF(iemOp_and_Gb_Eb)
{
    IEMOP_MNEMONIC("and Gb,Eb");
    /* AF is undefined after AND; exclude it from verification compares. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_and);
}
7393
7394
/** Opcode 0x23 - AND Gv,Ev. */
FNIEMOP_DEF(iemOp_and_Gv_Ev)
{
    IEMOP_MNEMONIC("and Gv,Ev");
    /* AF is undefined after AND; exclude it from verification compares. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_and);
}
7402
7403
/** Opcode 0x24 - AND AL,Ib. */
FNIEMOP_DEF(iemOp_and_Al_Ib)
{
    IEMOP_MNEMONIC("and al,Ib");
    /* AF is undefined after AND; exclude it from verification compares. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_and);
}
7411
7412
/** Opcode 0x25 - AND rAX,Iz. */
FNIEMOP_DEF(iemOp_and_eAX_Iz)
{
    IEMOP_MNEMONIC("and rAX,Iz");
    /* AF is undefined after AND; exclude it from verification compares. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_and);
}
7420
7421
/** Opcode 0x26 - ES segment override prefix. */
FNIEMOP_DEF(iemOp_seg_ES)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg es");
    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_ES;
    pIemCpu->iEffSeg = X86_SREG_ES;

    /* Continue decoding the next opcode byte with ES as effective segment. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
7432
7433
/** Opcode 0x27 - DAA (decimal adjust AL after addition). */
FNIEMOP_DEF(iemOp_daa)
{
    IEMOP_MNEMONIC("daa AL");
    IEMOP_HLP_NO_64BIT();       /* DAA is invalid in 64-bit mode. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* OF is undefined after DAA; exclude it from verification compares. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_daa);
}
7443
7444
/** Opcode 0x28 - SUB Eb,Gb. */
FNIEMOP_DEF(iemOp_sub_Eb_Gb)
{
    IEMOP_MNEMONIC("sub Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sub);
}
7451
7452
/** Opcode 0x29 - SUB Ev,Gv. */
FNIEMOP_DEF(iemOp_sub_Ev_Gv)
{
    IEMOP_MNEMONIC("sub Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sub);
}
7459
7460
/** Opcode 0x2a - SUB Gb,Eb. */
FNIEMOP_DEF(iemOp_sub_Gb_Eb)
{
    IEMOP_MNEMONIC("sub Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sub);
}
7467
7468
/** Opcode 0x2b - SUB Gv,Ev. */
FNIEMOP_DEF(iemOp_sub_Gv_Ev)
{
    IEMOP_MNEMONIC("sub Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sub);
}
7475
7476
/** Opcode 0x2c - SUB AL,Ib. */
FNIEMOP_DEF(iemOp_sub_Al_Ib)
{
    IEMOP_MNEMONIC("sub al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sub);
}
7483
7484
/** Opcode 0x2d - SUB rAX,Iz. */
FNIEMOP_DEF(iemOp_sub_eAX_Iz)
{
    IEMOP_MNEMONIC("sub rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sub);
}
7491
7492
/** Opcode 0x2e - CS segment override prefix. */
FNIEMOP_DEF(iemOp_seg_CS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg cs");
    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_CS;
    pIemCpu->iEffSeg = X86_SREG_CS;

    /* Continue decoding the next opcode byte with CS as effective segment. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
7503
7504
/** Opcode 0x2f - DAS (decimal adjust AL after subtraction). */
FNIEMOP_DEF(iemOp_das)
{
    IEMOP_MNEMONIC("das AL");
    IEMOP_HLP_NO_64BIT();       /* DAS is invalid in 64-bit mode. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* OF is undefined after DAS; exclude it from verification compares. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_das);
}
7514
7515
/** Opcode 0x30 - XOR Eb,Gb. */
FNIEMOP_DEF(iemOp_xor_Eb_Gb)
{
    IEMOP_MNEMONIC("xor Eb,Gb");
    /* AF is undefined after XOR; exclude it from verification compares. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_xor);
}
7523
7524
/** Opcode 0x31 - XOR Ev,Gv. */
FNIEMOP_DEF(iemOp_xor_Ev_Gv)
{
    IEMOP_MNEMONIC("xor Ev,Gv");
    /* AF is undefined after XOR; exclude it from verification compares. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_xor);
}
7532
7533
/** Opcode 0x32 - XOR Gb,Eb. */
FNIEMOP_DEF(iemOp_xor_Gb_Eb)
{
    IEMOP_MNEMONIC("xor Gb,Eb");
    /* AF is undefined after XOR; exclude it from verification compares. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_xor);
}
7541
7542
/** Opcode 0x33 - XOR Gv,Ev. */
FNIEMOP_DEF(iemOp_xor_Gv_Ev)
{
    IEMOP_MNEMONIC("xor Gv,Ev");
    /* AF is undefined after XOR; exclude it from verification compares. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_xor);
}
7550
7551
/** Opcode 0x34 - XOR AL,Ib. */
FNIEMOP_DEF(iemOp_xor_Al_Ib)
{
    IEMOP_MNEMONIC("xor al,Ib");
    /* AF is undefined after XOR; exclude it from verification compares. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_xor);
}
7559
7560
/** Opcode 0x35 - XOR rAX,Iz. */
FNIEMOP_DEF(iemOp_xor_eAX_Iz)
{
    IEMOP_MNEMONIC("xor rAX,Iz");
    /* AF is undefined after XOR; exclude it from verification compares. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_xor);
}
7568
7569
/** Opcode 0x36 - SS segment override prefix. */
FNIEMOP_DEF(iemOp_seg_SS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ss");
    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_SS;
    pIemCpu->iEffSeg = X86_SREG_SS;

    /* Continue decoding the next opcode byte with SS as effective segment. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
7580
7581
/** Opcode 0x37 - AAA (ASCII adjust AL after addition); not yet implemented. */
FNIEMOP_STUB(iemOp_aaa);
7584
7585
/** Opcode 0x38 - CMP Eb,Gb (compare; destination is not written). */
FNIEMOP_DEF(iemOp_cmp_Eb_Gb)
{
    IEMOP_MNEMONIC("cmp Eb,Gb");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo do we have to decode the whole instruction first? */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_cmp);
}
7593
7594
/** Opcode 0x39 - CMP Ev,Gv (compare; destination is not written). */
FNIEMOP_DEF(iemOp_cmp_Ev_Gv)
{
    IEMOP_MNEMONIC("cmp Ev,Gv");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo do we have to decode the whole instruction first? */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_cmp);
}
7602
7603
/** Opcode 0x3a - CMP Gb,Eb (compare; destination is not written). */
FNIEMOP_DEF(iemOp_cmp_Gb_Eb)
{
    IEMOP_MNEMONIC("cmp Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_cmp);
}
7610
7611
/** Opcode 0x3b - CMP Gv,Ev (compare; destination is not written). */
FNIEMOP_DEF(iemOp_cmp_Gv_Ev)
{
    IEMOP_MNEMONIC("cmp Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_cmp);
}
7618
7619
/** Opcode 0x3c - CMP AL,Ib (compare; destination is not written). */
FNIEMOP_DEF(iemOp_cmp_Al_Ib)
{
    IEMOP_MNEMONIC("cmp al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_cmp);
}
7626
7627
/** Opcode 0x3d - CMP rAX,Iz (compare; destination is not written). */
FNIEMOP_DEF(iemOp_cmp_eAX_Iz)
{
    IEMOP_MNEMONIC("cmp rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_cmp);
}
7634
7635
/** Opcode 0x3e - DS segment override prefix. */
FNIEMOP_DEF(iemOp_seg_DS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ds");
    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_DS;
    pIemCpu->iEffSeg = X86_SREG_DS;

    /* Continue decoding the next opcode byte with DS as effective segment. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
7646
7647
/** Opcode 0x3f - AAS (ASCII adjust AL after subtraction); not yet implemented. */
FNIEMOP_STUB(iemOp_aas);
7650
/**
 * Common 'inc/dec/not/neg register' helper.
 *
 * Emits the IEM_MC sequence applying the operand-size-specific worker from
 * @a pImpl to the given general register, updating EFLAGS via the worker.
 *
 * @param   pImpl   The unary operator implementation table (16/32/64-bit
 *                  workers).
 * @param   iReg    The general purpose register number (X86_GREG_XXX).
 */
FNIEMOP_DEF_2(iemOpCommonUnaryGReg, PCIEMOPUNARYSIZES, pImpl, uint8_t, iReg)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint16_t *,      pu16Dst, 0);
            IEM_MC_ARG(uint32_t *,      pEFlags, 1);
            IEM_MC_REF_GREG_U16(pu16Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint32_t *,      pu32Dst, 0);
            IEM_MC_ARG(uint32_t *,      pEFlags, 1);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
            /* 32-bit writes clear the high half of the 64-bit register. */
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint64_t *,      pu64Dst, 0);
            IEM_MC_ARG(uint32_t *,      pEFlags, 1);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
    }
    /* NOTE(review): only reachable if enmEffOpSize is outside the three
       handled modes; consider an assertion here instead — TODO confirm. */
    return VINF_SUCCESS;
}
7695
7696
/** Opcode 0x40 - INC eAX in 16/32-bit mode; REX prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_inc_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX;

        /* Continue decoding with the next opcode byte. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eAX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xAX);
}
7715
7716
/** Opcode 0x41 - INC eCX in 16/32-bit mode; REX.B prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_inc_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.b");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B;
        pIemCpu->uRexB     = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eCX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xCX);
}
7736
7737
/** Opcode 0x42 - INC eDX in 16/32-bit mode; REX.X prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_inc_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.x");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X;
        pIemCpu->uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eDX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDX);
}
7757
7758
7759
/** Opcode 0x43 - INC eBX in 16/32-bit mode; REX.BX prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_inc_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bx");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pIemCpu->uRexB     = 1 << 3;
        pIemCpu->uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eBX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBX);
}
7780
7781
/** Opcode 0x44 - INC eSP in 16/32-bit mode; REX.R prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_inc_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.r");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R;
        pIemCpu->uRexReg   = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eSP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSP);
}
7801
7802
/** Opcode 0x45 - INC eBP in 16/32-bit mode; REX.RB prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_inc_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rb");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B;
        pIemCpu->uRexReg   = 1 << 3;
        pIemCpu->uRexB     = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eBP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBP);
}
7823
7824
/** Opcode 0x46 - INC eSI in 16/32-bit mode; REX.RX prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_inc_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rx");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X;
        pIemCpu->uRexReg   = 1 << 3;
        pIemCpu->uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eSI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSI);
}
7845
7846
/** Opcode 0x47 - INC eDI in 16/32-bit mode; REX.RBX prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_inc_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbx");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pIemCpu->uRexReg   = 1 << 3;
        pIemCpu->uRexB     = 1 << 3;
        pIemCpu->uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eDI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDI);
}
7868
7869
/** Opcode 0x48 - DEC eAX in 16/32-bit mode; REX.W prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_dec_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.w");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_SIZE_REX_W;
        iemRecalEffOpSize(pIemCpu);     /* REX.W changes the effective operand size. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eAX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xAX);
}
7889
7890
/** Opcode 0x49 - DEC eCX in 16/32-bit mode; REX.BW prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_dec_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bw");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexB     = 1 << 3;
        iemRecalEffOpSize(pIemCpu);     /* REX.W changes the effective operand size. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eCX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xCX);
}
7911
7912
/** Opcode 0x4a - DEC eDX in 16/32-bit mode; REX.XW prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_dec_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.xw");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexIndex = 1 << 3;
        iemRecalEffOpSize(pIemCpu);     /* REX.W changes the effective operand size. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eDX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDX);
}
7933
7934
/** Opcode 0x4b - DEC eBX in 16/32-bit mode; REX.BXW prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_dec_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bxw");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexB     = 1 << 3;
        pIemCpu->uRexIndex = 1 << 3;
        iemRecalEffOpSize(pIemCpu);     /* REX.W changes the effective operand size. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eBX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBX);
}
7956
7957
/** Opcode 0x4c - DEC eSP in 16/32-bit mode; REX.RW prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_dec_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rw");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexReg   = 1 << 3;
        iemRecalEffOpSize(pIemCpu);     /* REX.W changes the effective operand size. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eSP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSP);
}
7978
7979
/** Opcode 0x4d.
 * In 64-bit mode this byte is the REX.RBW prefix; otherwise it is DEC eBP. */
FNIEMOP_DEF(iemOp_dec_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbw");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexReg = 1 << 3; /* REX.R: bit 3 of the ModR/M reg field. */
        pIemCpu->uRexB   = 1 << 3; /* REX.B: bit 3 of the r/m / base / opcode register. */
        iemRecalEffOpSize(pIemCpu); /* REX.W changes the effective operand size. */

        /* Restart decoding with the byte following the prefix. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eBP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBP);
}
8001
8002
/** Opcode 0x4e.
 * In 64-bit mode this byte is the REX.RXW prefix; otherwise it is DEC eSI. */
FNIEMOP_DEF(iemOp_dec_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rxw");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexReg   = 1 << 3; /* REX.R: bit 3 of the ModR/M reg field. */
        pIemCpu->uRexIndex = 1 << 3; /* REX.X: bit 3 of the SIB index register. */
        iemRecalEffOpSize(pIemCpu);  /* REX.W changes the effective operand size. */

        /* Restart decoding with the byte following the prefix. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eSI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSI);
}
8024
8025
/** Opcode 0x4f.
 * In 64-bit mode this byte is the REX.RBXW prefix (all bits set); otherwise
 * it is DEC eDI. */
FNIEMOP_DEF(iemOp_dec_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbxw");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexReg   = 1 << 3; /* REX.R: bit 3 of the ModR/M reg field. */
        pIemCpu->uRexB     = 1 << 3; /* REX.B: bit 3 of the r/m / base / opcode register. */
        pIemCpu->uRexIndex = 1 << 3; /* REX.X: bit 3 of the SIB index register. */
        iemRecalEffOpSize(pIemCpu);  /* REX.W changes the effective operand size. */

        /* Restart decoding with the byte following the prefix. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eDI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDI);
}
8048
8049
/**
 * Common 'push register' helper.
 *
 * Pushes the general register @a iReg according to the effective operand
 * size.  In 64-bit mode the REX.B bit extends the register index and the
 * default operand size is forced to 64-bit (a 66h prefix selects 16-bit;
 * there is no 32-bit push in long mode).
 *
 * @param   iReg    The register index (X86_GREG_xXX, low 3 bits).
 */
FNIEMOP_DEF_1(iemOpCommonPushGReg, uint8_t, iReg)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        iReg |= pIemCpu->uRexB; /* REX.B supplies bit 3 of the register index. */
        pIemCpu->enmDefOpSize = IEMMODE_64BIT;
        pIemCpu->enmEffOpSize = !(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Value);
            IEM_MC_FETCH_GREG_U16(u16Value, iReg);
            IEM_MC_PUSH_U16(u16Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U32(u32Value, iReg);
            IEM_MC_PUSH_U32(u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U64(u64Value, iReg);
            IEM_MC_PUSH_U64(u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
8095
8096
/** Opcode 0x50 - PUSH rAX (REX.B selects r8 in 64-bit mode). */
FNIEMOP_DEF(iemOp_push_eAX)
{
    IEMOP_MNEMONIC("push rAX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xAX);
}
8103
8104
/** Opcode 0x51 - PUSH rCX (REX.B selects r9 in 64-bit mode). */
FNIEMOP_DEF(iemOp_push_eCX)
{
    IEMOP_MNEMONIC("push rCX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xCX);
}
8111
8112
/** Opcode 0x52 - PUSH rDX (REX.B selects r10 in 64-bit mode). */
FNIEMOP_DEF(iemOp_push_eDX)
{
    IEMOP_MNEMONIC("push rDX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDX);
}
8119
8120
/** Opcode 0x53 - PUSH rBX (REX.B selects r11 in 64-bit mode). */
FNIEMOP_DEF(iemOp_push_eBX)
{
    IEMOP_MNEMONIC("push rBX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBX);
}
8127
8128
8129/** Opcode 0x54. */
8130FNIEMOP_DEF(iemOp_push_eSP)
8131{
8132 IEMOP_MNEMONIC("push rSP");
8133 if (IEM_GET_TARGET_CPU(pIemCpu) == IEMTARGETCPU_8086)
8134 {
8135 IEM_MC_BEGIN(0, 1);
8136 IEM_MC_LOCAL(uint16_t, u16Value);
8137 IEM_MC_FETCH_GREG_U16(u16Value, X86_GREG_xSP);
8138 IEM_MC_SUB_LOCAL_U16(u16Value, 2);
8139 IEM_MC_PUSH_U16(u16Value);
8140 IEM_MC_ADVANCE_RIP();
8141 IEM_MC_END();
8142 }
8143 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSP);
8144}
8145
8146
/** Opcode 0x55 - PUSH rBP (REX.B selects r13 in 64-bit mode). */
FNIEMOP_DEF(iemOp_push_eBP)
{
    IEMOP_MNEMONIC("push rBP");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBP);
}
8153
8154
/** Opcode 0x56 - PUSH rSI (REX.B selects r14 in 64-bit mode). */
FNIEMOP_DEF(iemOp_push_eSI)
{
    IEMOP_MNEMONIC("push rSI");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSI);
}
8161
8162
/** Opcode 0x57 - PUSH rDI (REX.B selects r15 in 64-bit mode). */
FNIEMOP_DEF(iemOp_push_eDI)
{
    IEMOP_MNEMONIC("push rDI");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDI);
}
8169
8170
/**
 * Common 'pop register' helper.
 *
 * Pops the top of stack into general register @a iReg according to the
 * effective operand size.  In 64-bit mode the REX.B bit extends the register
 * index and the default operand size is 64-bit (66h selects 16-bit).
 * POP rSP is special-cased by the caller (see iemOp_pop_eSP).
 *
 * @param   iReg    The register index (X86_GREG_xXX, low 3 bits).
 */
FNIEMOP_DEF_1(iemOpCommonPopGReg, uint8_t, iReg)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        iReg |= pIemCpu->uRexB; /* REX.B supplies bit 3 of the register index. */
        pIemCpu->enmDefOpSize = IEMMODE_64BIT;
        pIemCpu->enmEffOpSize = !(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t *, pu16Dst);
            IEM_MC_REF_GREG_U16(pu16Dst, iReg);
            IEM_MC_POP_U16(pu16Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t *, pu32Dst);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            IEM_MC_POP_U32(pu32Dst);
            /* 32-bit writes zero the high half of the 64-bit register. */
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /** @todo testcase*/
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t *, pu64Dst);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_POP_U64(pu64Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
8217
8218
/** Opcode 0x58 - POP rAX (REX.B selects r8 in 64-bit mode). */
FNIEMOP_DEF(iemOp_pop_eAX)
{
    IEMOP_MNEMONIC("pop rAX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xAX);
}
8225
8226
/** Opcode 0x59 - POP rCX (REX.B selects r9 in 64-bit mode). */
FNIEMOP_DEF(iemOp_pop_eCX)
{
    IEMOP_MNEMONIC("pop rCX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xCX);
}
8233
8234
/** Opcode 0x5a - POP rDX (REX.B selects r10 in 64-bit mode). */
FNIEMOP_DEF(iemOp_pop_eDX)
{
    IEMOP_MNEMONIC("pop rDX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDX);
}
8241
8242
/** Opcode 0x5b - POP rBX (REX.B selects r11 in 64-bit mode). */
FNIEMOP_DEF(iemOp_pop_eBX)
{
    IEMOP_MNEMONIC("pop rBX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBX);
}
8249
8250
/** Opcode 0x5c - POP rSP.
 *
 * Special-cased because the value read from the stack becomes the new stack
 * pointer itself: the pop reads into a local first and stores it into SP
 * afterwards, rather than popping straight into the register.  With REX.B in
 * 64-bit mode the destination is r12 instead and the common helper is used. */
FNIEMOP_DEF(iemOp_pop_eSP)
{
    IEMOP_MNEMONIC("pop rSP");
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        if (pIemCpu->uRexB)
            return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSP);
        pIemCpu->enmDefOpSize = IEMMODE_64BIT;
        pIemCpu->enmEffOpSize = !(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
                           DISOPTYPE_HARMLESS | DISOPTYPE_DEFAULT_64_OP_SIZE | DISOPTYPE_REXB_EXTENDS_OPREG);
    /** @todo add testcase for this instruction. */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Dst);
            IEM_MC_POP_U16(&u16Dst); /** @todo not correct MC, fix later. */
            IEM_MC_STORE_GREG_U16(X86_GREG_xSP, u16Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Dst);
            IEM_MC_POP_U32(&u32Dst);
            IEM_MC_STORE_GREG_U32(X86_GREG_xSP, u32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Dst);
            IEM_MC_POP_U64(&u64Dst);
            IEM_MC_STORE_GREG_U64(X86_GREG_xSP, u64Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
8298
8299
/** Opcode 0x5d - POP rBP (REX.B selects r13 in 64-bit mode). */
FNIEMOP_DEF(iemOp_pop_eBP)
{
    IEMOP_MNEMONIC("pop rBP");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBP);
}
8306
8307
/** Opcode 0x5e - POP rSI (REX.B selects r14 in 64-bit mode). */
FNIEMOP_DEF(iemOp_pop_eSI)
{
    IEMOP_MNEMONIC("pop rSI");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSI);
}
8314
8315
/** Opcode 0x5f - POP rDI (REX.B selects r15 in 64-bit mode). */
FNIEMOP_DEF(iemOp_pop_eDI)
{
    IEMOP_MNEMONIC("pop rDI");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDI);
}
8322
8323
/** Opcode 0x60 - PUSHA/PUSHAD.
 * Requires a 186 or later; invalid in 64-bit mode.  Deferred to a C
 * implementation selected by the effective operand size. */
FNIEMOP_DEF(iemOp_pusha)
{
    IEMOP_MNEMONIC("pusha");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_NO_64BIT();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_16);
    Assert(pIemCpu->enmEffOpSize == IEMMODE_32BIT); /* 64-bit excluded above. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_32);
}
8335
8336
/** Opcode 0x61 - POPA/POPAD.
 * Requires a 186 or later; invalid in 64-bit mode.  Deferred to a C
 * implementation selected by the effective operand size. */
FNIEMOP_DEF(iemOp_popa)
{
    IEMOP_MNEMONIC("popa");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_NO_64BIT();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_16);
    Assert(pIemCpu->enmEffOpSize == IEMMODE_32BIT); /* 64-bit excluded above. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_32);
}
8348
8349
/** Opcode 0x62 - BOUND Gv,Ma (and the EVEX prefix escape) - not implemented,
 *  stubbed out. */
FNIEMOP_STUB(iemOp_bound_Gv_Ma_evex);
//  IEMOP_HLP_MIN_186();
8353
8354
/** Opcode 0x63 - non-64-bit modes: ARPL Ew,Gw.
 * Adjusts the RPL of the selector in Ew to at least that of Gw, setting ZF
 * if a change was made.  Requires a 286+ and protected mode (no real or V86
 * mode).  In 64-bit mode 0x63 is MOVSXD instead (see iemOp_movsxd_Gv_Ev). */
FNIEMOP_DEF(iemOp_arpl_Ew_Gw)
{
    IEMOP_MNEMONIC("arpl Ew,Gw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register */
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint16_t *,      pu16Dst,    0);
        IEM_MC_ARG(uint16_t,        u16Src,     1);
        IEM_MC_ARG(uint32_t *,      pEFlags,    2);

        IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* Memory: read-modify-write on the destination selector word. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *, pu16Dst,          0);
        IEM_MC_ARG(uint16_t,   u16Src,           1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;

}
8404
8405
/** Opcode 0x63.
 * MOVSXD Gv,Ev - sign-extend a 32-bit source into a 64-bit register
 * (64-bit mode only; the caller dispatches to ARPL otherwise).
 * @note This is a weird one. It works like a regular move instruction if
 *       REX.W isn't set, at least according to AMD docs (rev 3.15, 2009-11).
 * @todo This definitely needs a testcase to verify the odd cases. */
FNIEMOP_DEF(iemOp_movsxd_Gv_Ev)
{
    Assert(pIemCpu->enmEffOpSize == IEMMODE_64BIT); /* Caller branched already . */

    IEMOP_MNEMONIC("movsxd Gv,Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register to register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, u64Value);
        IEM_MC_FETCH_GREG_U32_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, u64Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U32_SX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
8447
8448
/** Opcode 0x64 - FS segment override prefix (386+).
 * Records the prefix and restarts decoding with the next opcode byte. */
FNIEMOP_DEF(iemOp_seg_FS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg fs");
    IEMOP_HLP_MIN_386();

    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_FS;
    pIemCpu->iEffSeg    = X86_SREG_FS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
8461
8462
/** Opcode 0x65 - GS segment override prefix (386+).
 * Records the prefix and restarts decoding with the next opcode byte. */
FNIEMOP_DEF(iemOp_seg_GS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg gs");
    IEMOP_HLP_MIN_386();

    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_GS;
    pIemCpu->iEffSeg    = X86_SREG_GS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
8475
8476
/** Opcode 0x66 - operand-size override prefix (386+).
 * Records the prefix, recalculates the effective operand size and restarts
 * decoding with the next opcode byte. */
FNIEMOP_DEF(iemOp_op_size)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("op size");
    IEMOP_HLP_MIN_386();

    pIemCpu->fPrefixes |= IEM_OP_PRF_SIZE_OP;
    iemRecalEffOpSize(pIemCpu);

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
8489
8490
/** Opcode 0x67 - address-size override prefix (386+).
 * Toggles the effective address mode relative to the default (16<->32 in
 * legacy modes, 64->32 in long mode) and restarts decoding with the next
 * opcode byte. */
FNIEMOP_DEF(iemOp_addr_size)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("addr size");
    IEMOP_HLP_MIN_386();

    pIemCpu->fPrefixes |= IEM_OP_PRF_SIZE_ADDR;
    switch (pIemCpu->enmDefAddrMode)
    {
        case IEMMODE_16BIT: pIemCpu->enmEffAddrMode = IEMMODE_32BIT; break;
        case IEMMODE_32BIT: pIemCpu->enmEffAddrMode = IEMMODE_16BIT; break;
        case IEMMODE_64BIT: pIemCpu->enmEffAddrMode = IEMMODE_32BIT; break; /* no 16-bit addressing in long mode */
        default: AssertFailed();
    }

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
8509
8510
/** Opcode 0x68 - PUSH Iz.
 * Pushes an immediate of operand size (186+).  In 64-bit mode the default
 * operand size is 64-bit and the 32-bit immediate is sign-extended. */
FNIEMOP_DEF(iemOp_push_Iz)
{
    IEMOP_MNEMONIC("push Iz");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U16(u16Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U32(u32Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            /* Immediate is 32-bit, sign-extended to 64. */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U64(u64Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
8555
8556
/** Opcode 0x69 - IMUL Gv,Ev,Iz (186+).
 * Three-operand signed multiply: Gv = Ev * Iz.  The immediate is of operand
 * size (sign-extended 32-bit in 64-bit mode).  SF/ZF/AF/PF are undefined
 * after this instruction.  For the memory forms the immediate is fetched
 * after the effective address (decode order matters), so the immediate size
 * is passed to IEM_MC_CALC_RM_EFF_ADDR for correct RIP-relative addressing. */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Iz)
{
    IEMOP_MNEMONIC("imul Gv,Ev,Iz"); /* Gv = Ev * Iz; */
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *,      pu16Dst,           0);
                IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/ u16Imm,1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_LOCAL(uint16_t,      u16Tmp);

                /* Multiply into a local, then store to the destination register. */
                IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,      pu16Dst,           0);
                IEM_MC_ARG(uint16_t,        u16Src,            1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_LOCAL(uint16_t,      u16Tmp);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); /* 2 = trailing imm16 size. */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *,      pu32Dst,           0);
                IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/ u32Imm,1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_LOCAL(uint32_t,      u32Tmp);

                IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,      pu32Dst,           0);
                IEM_MC_ARG(uint32_t,        u32Src,            1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_LOCAL(uint32_t,      u32Tmp);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* 4 = trailing imm32 size. */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand; imm32 sign-extended to 64 bits. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *,      pu64Dst,           0);
                IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/ u64Imm,1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_LOCAL(uint64_t,      u64Tmp);

                IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand; imm32 sign-extended to 64 bits. */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,      pu64Dst,           0);
                IEM_MC_ARG(uint64_t,        u64Src,            1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_LOCAL(uint64_t,      u64Tmp);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* 4 = trailing imm32 size. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }
    }
    AssertFailedReturn(VERR_IEM_IPE_9);
}
8716
8717
/** Opcode 0x6a - PUSH Ib (186+).
 * Pushes a sign-extended 8-bit immediate with the effective operand size;
 * 64-bit is the default in long mode. */
FNIEMOP_DEF(iemOp_push_Ib)
{
    IEMOP_MNEMONIC("push Ib");
    IEMOP_HLP_MIN_186();
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0,0);
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_PUSH_U16(i8Imm); /* int8_t sign-extends via the usual promotions. */
            break;
        case IEMMODE_32BIT:
            IEM_MC_PUSH_U32(i8Imm);
            break;
        case IEMMODE_64BIT:
            IEM_MC_PUSH_U64(i8Imm);
            break;
    }
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8744
8745
/** Opcode 0x6b - IMUL Gv,Ev,Ib (186+).
 * Three-operand signed multiply with a sign-extended 8-bit immediate:
 * Gv = Ev * Ib.  SF/ZF/AF/PF are undefined after this instruction.  For the
 * memory forms the imm8 follows the ModR/M bytes, hence the 1 passed to
 * IEM_MC_CALC_RM_EFF_ADDR. */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Ib)
{
    IEMOP_MNEMONIC("imul Gv,Ev,Ib"); /* Gv = Ev * Iz; */
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                    0);
                IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/ (int8_t)u8Imm, 1); /* sign-extend imm8 */
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);
                IEM_MC_LOCAL(uint16_t,      u16Tmp);

                /* Multiply into a local, then store to the destination register. */
                IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                    0);
                IEM_MC_ARG(uint16_t,        u16Src,                     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);
                IEM_MC_LOCAL(uint16_t,      u16Tmp);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = trailing imm8 size. */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_S8_SX_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                    0);
                IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/ (int8_t)u8Imm, 1); /* sign-extend imm8 */
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);
                IEM_MC_LOCAL(uint32_t,      u32Tmp);

                IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                    0);
                IEM_MC_ARG(uint32_t,        u32Src,                     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);
                IEM_MC_LOCAL(uint32_t,      u32Tmp);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = trailing imm8 size. */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_S8_SX_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                    0);
                IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/ (int8_t)u8Imm, 1); /* sign-extend imm8 */
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);
                IEM_MC_LOCAL(uint64_t,      u64Tmp);

                IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                    0);
                IEM_MC_ARG(uint64_t,        u64Src,                     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);
                IEM_MC_LOCAL(uint64_t,      u64Tmp);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = trailing imm8 size. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S8_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
    }
    AssertFailedReturn(VERR_IEM_IPE_8);
}
8899
8900
/** Opcode 0x6c - INSB Yb,DX (186+).
 * Byte string input from port DX.  Both F2 and F3 prefixes select the REP
 * form; the C implementation is chosen by the effective address mode. */
FNIEMOP_DEF(iemOp_insb_Yb_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep ins Yb,DX");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr16, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr32, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC("ins Yb,DX");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr16, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr32, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8929
8930
/** Opcode 0x6d - INSW/INSD Yv,DX (186+).
 * Word/dword string input from port DX.  There is no 64-bit operand size for
 * INS, so IEMMODE_64BIT shares the 32-bit C implementations.  Both F2 and F3
 * prefixes select the REP form. */
FNIEMOP_DEF(iemOp_inswd_Yv_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC("rep ins Yv,DX");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* no 64-bit operand size for INS; treat as 32-bit. */
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC("ins Yv,DX");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* no 64-bit operand size for INS; treat as 32-bit. */
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8991
8992
/** Opcode 0x6e - OUTSB DX,Xb (186+).
 * Byte string output to port DX; honours segment overrides via iEffSeg.
 * Both F2 and F3 prefixes select the REP form. */
FNIEMOP_DEF(iemOp_outsb_Yb_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep outs DX,Yb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr16, pIemCpu->iEffSeg, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr32, pIemCpu->iEffSeg, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr64, pIemCpu->iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC("outs DX,Yb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr16, pIemCpu->iEffSeg, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr32, pIemCpu->iEffSeg, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr64, pIemCpu->iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9021
9022
/** Opcode 0x6f. */
FNIEMOP_DEF(iemOp_outswd_Yv_DX)
{
    /*
     * OUTSW/OUTSD - write a word/dword from [e/rSI] to the I/O port in DX.
     * Deferred to a C worker selected by effective operand size and address
     * size; REP/REPNE both select the repeating workers.  Note that
     * IEMMODE_64BIT falls through to the 32-bit-operand workers: there is no
     * 64-bit operand form of OUTS.  The trailing 'false': presumably
     * fIoChecked -- TODO confirm against the iemCImpl_outs_* signatures.
     */
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC("rep outs DX,Yv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr16, pIemCpu->iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr32, pIemCpu->iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr64, pIemCpu->iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* no 64-bit operand form; use the 32-bit workers */
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr16, pIemCpu->iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr32, pIemCpu->iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr64, pIemCpu->iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC("outs DX,Yv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr16, pIemCpu->iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr32, pIemCpu->iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr64, pIemCpu->iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* no 64-bit operand form; use the 32-bit workers */
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr16, pIemCpu->iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr32, pIemCpu->iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr64, pIemCpu->iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9083
9084
/** Opcode 0x70. */
FNIEMOP_DEF(iemOp_jo_Jb)
{
    /* JO rel8: jump short if the overflow flag (OF) is set. */
    IEMOP_MNEMONIC("jo Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* short Jcc defaults to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);   /* taken: RIP-relative jump */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();       /* not taken: fall through */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9102
9103
/** Opcode 0x71. */
FNIEMOP_DEF(iemOp_jno_Jb)
{
    /* JNO rel8: jump short if OF is clear (note the swapped IF/ELSE branches). */
    IEMOP_MNEMONIC("jno Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();       /* OF set: not taken */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);   /* OF clear: taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9121
/** Opcode 0x72. */
FNIEMOP_DEF(iemOp_jc_Jb)
{
    /* JC/JB/JNAE rel8: jump short if the carry flag (CF) is set. */
    IEMOP_MNEMONIC("jc/jnae Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_REL_JMP_S8(i8Imm);   /* CF set: taken */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();       /* CF clear: not taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9139
9140
/** Opcode 0x73. */
FNIEMOP_DEF(iemOp_jnc_Jb)
{
    /* JNC/JNB/JAE rel8: jump short if CF is clear (swapped IF/ELSE branches). */
    IEMOP_MNEMONIC("jnc/jnb Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_ADVANCE_RIP();       /* CF set: not taken */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);   /* CF clear: taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9158
9159
/** Opcode 0x74. */
FNIEMOP_DEF(iemOp_je_Jb)
{
    /* JE/JZ rel8: jump short if the zero flag (ZF) is set. */
    IEMOP_MNEMONIC("je/jz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8(i8Imm);   /* ZF set: taken */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();       /* ZF clear: not taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9177
9178
/** Opcode 0x75. */
FNIEMOP_DEF(iemOp_jne_Jb)
{
    /* JNE/JNZ rel8: jump short if ZF is clear (swapped IF/ELSE branches). */
    IEMOP_MNEMONIC("jne/jnz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP();       /* ZF set: not taken */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);   /* ZF clear: taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9196
9197
/** Opcode 0x76. */
FNIEMOP_DEF(iemOp_jbe_Jb)
{
    /* JBE/JNA rel8: jump short if below or equal, i.e. CF=1 or ZF=1. */
    IEMOP_MNEMONIC("jbe/jna Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8(i8Imm);   /* CF or ZF set: taken */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();       /* both clear: not taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9215
9216
/** Opcode 0x77. */
FNIEMOP_DEF(iemOp_jnbe_Jb)
{
    /* JNBE/JA rel8: jump short if above, i.e. CF=0 and ZF=0 (swapped branches). */
    IEMOP_MNEMONIC("jnbe/ja Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP();       /* CF or ZF set: not taken */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);   /* both clear: taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9234
9235
/** Opcode 0x78. */
FNIEMOP_DEF(iemOp_js_Jb)
{
    /* JS rel8: jump short if the sign flag (SF) is set. */
    IEMOP_MNEMONIC("js Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_REL_JMP_S8(i8Imm);   /* SF set: taken */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();       /* SF clear: not taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9253
9254
/** Opcode 0x79. */
FNIEMOP_DEF(iemOp_jns_Jb)
{
    /* JNS rel8: jump short if SF is clear (swapped IF/ELSE branches). */
    IEMOP_MNEMONIC("jns Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_ADVANCE_RIP();       /* SF set: not taken */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);   /* SF clear: taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9272
9273
/** Opcode 0x7a. */
FNIEMOP_DEF(iemOp_jp_Jb)
{
    /* JP/JPE rel8: jump short if the parity flag (PF) is set. */
    IEMOP_MNEMONIC("jp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_REL_JMP_S8(i8Imm);   /* PF set: taken */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();       /* PF clear: not taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9291
9292
/** Opcode 0x7b. */
FNIEMOP_DEF(iemOp_jnp_Jb)
{
    /* JNP/JPO rel8: jump short if PF is clear (swapped IF/ELSE branches). */
    IEMOP_MNEMONIC("jnp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_ADVANCE_RIP();       /* PF set: not taken */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);   /* PF clear: taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9310
9311
/** Opcode 0x7c. */
FNIEMOP_DEF(iemOp_jl_Jb)
{
    /* JL/JNGE rel8: jump short if less (signed), i.e. SF != OF. */
    IEMOP_MNEMONIC("jl/jnge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);   /* SF != OF: taken */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();       /* SF == OF: not taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9329
9330
/** Opcode 0x7d. */
FNIEMOP_DEF(iemOp_jnl_Jb)
{
    /* JNL/JGE rel8: jump short if not less (signed), i.e. SF == OF (swapped branches). */
    IEMOP_MNEMONIC("jnl/jge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();       /* SF != OF: not taken */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);   /* SF == OF: taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9348
9349
/** Opcode 0x7e. */
FNIEMOP_DEF(iemOp_jle_Jb)
{
    /* JLE/JNG rel8: jump short if less or equal (signed), i.e. ZF=1 or SF != OF. */
    IEMOP_MNEMONIC("jle/jng Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);   /* ZF set or SF != OF: taken */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();       /* otherwise: not taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9367
9368
/** Opcode 0x7f. */
FNIEMOP_DEF(iemOp_jnle_Jb)
{
    /* JNLE/JG rel8: jump short if greater (signed), i.e. ZF=0 and SF == OF (swapped branches). */
    IEMOP_MNEMONIC("jnle/jg Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();       /* ZF set or SF != OF: not taken */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);   /* ZF clear and SF == OF: taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9386
9387
/** Opcode 0x80. */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_80)
{
    /*
     * Group 1: ADD/OR/ADC/SBB/AND/SUB/XOR/CMP Eb,Ib - byte r/m destination,
     * 8-bit immediate source.  The ModR/M reg field selects the operation
     * from g_apIemImplGrp1.
     */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* The mnemonic is picked out of a packed string table (4 bytes per entry)
       indexed by the ModR/M reg field. */
    IEMOP_MNEMONIC2("add\0or\0\0adc\0sbb\0and\0sub\0xor\0cmp" + ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)*4, "Eb,Ib");
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_NO_LOCK_PREFIX(); /* LOCK is never valid with a register destination */
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        uint32_t fAccess;
        if (pImpl->pfnLockedU8)
            fAccess = IEM_ACCESS_DATA_RW;
        else
        { /* CMP - read-only destination, so a LOCK prefix is invalid */
            IEMOP_HLP_NO_LOCK_PREFIX();
            fAccess = IEM_ACCESS_DATA_R;
        }
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        /* cbImm=1: tell the EA calculation one immediate byte follows the ModR/M bytes. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);

        IEM_MC_MEM_MAP(pu8Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
9446
9447
/** Opcode 0x81. */
FNIEMOP_DEF(iemOp_Grp1_Ev_Iz)
{
    /*
     * Group 1: ADD/OR/ADC/SBB/AND/SUB/XOR/CMP Ev,Iz - word/dword/qword r/m
     * destination with a word/dword immediate.  For the 64-bit operand size
     * the immediate is a dword sign-extended to 64 bits.  The ModR/M reg
     * field selects the operation.
     */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Mnemonic from a packed 4-bytes-per-entry string table, reg-field indexed. */
    IEMOP_MNEMONIC2("add\0or\0\0adc\0sbb\0and\0sub\0xor\0cmp" + ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)*4, "Ev,Iz");
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    /* NOTE(review): no default case on this operand-size switch; falling out
       the bottom just returns VINF_SUCCESS. */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register target */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u16Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                uint32_t fAccess;
                if (pImpl->pfnLockedU16)
                    fAccess = IEM_ACCESS_DATA_RW;
                else
                { /* CMP, TEST - read-only destination, LOCK prefix invalid */
                    IEMOP_HLP_NO_LOCK_PREFIX();
                    fAccess = IEM_ACCESS_DATA_R;
                }
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* cbImm=2: a word immediate follows the ModR/M bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_32BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register target */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u32Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                /* 32-bit register writes zero the upper half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                uint32_t fAccess;
                if (pImpl->pfnLockedU32)
                    fAccess = IEM_ACCESS_DATA_RW;
                else
                { /* CMP, TEST - read-only destination, LOCK prefix invalid */
                    IEMOP_HLP_NO_LOCK_PREFIX();
                    fAccess = IEM_ACCESS_DATA_R;
                }
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* cbImm=4: a dword immediate follows the ModR/M bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_64BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register target */
                /* Iz for 64-bit operands: dword immediate, sign-extended to 64 bits. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u64Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                uint32_t fAccess;
                if (pImpl->pfnLockedU64)
                    fAccess = IEM_ACCESS_DATA_RW;
                else
                { /* CMP - read-only destination, LOCK prefix invalid */
                    IEMOP_HLP_NO_LOCK_PREFIX();
                    fAccess = IEM_ACCESS_DATA_R;
                }
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* cbImm=4: the immediate is still a dword (sign-extended below). */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            break;
        }
    }
    return VINF_SUCCESS;
}
9622
9623
/** Opcode 0x82. */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_82)
{
    /* Opcode 0x82 is an alias of 0x80 (Group 1 Eb,Ib); it is invalid (#UD)
       in 64-bit mode, hence the IEMOP_HLP_NO_64BIT check before delegating. */
    IEMOP_HLP_NO_64BIT(); /** @todo do we need to decode the whole instruction or is this ok? */
    return FNIEMOP_CALL(iemOp_Grp1_Eb_Ib_80);
}
9630
9631
/** Opcode 0x83. */
FNIEMOP_DEF(iemOp_Grp1_Ev_Ib)
{
    /*
     * Group 1: ADD/OR/ADC/SBB/AND/SUB/XOR/CMP Ev,Ib - word/dword/qword r/m
     * destination with a byte immediate that is sign-extended to the
     * effective operand size (see the (int8_t) casts below).
     */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Mnemonic from a packed 4-bytes-per-entry string table, reg-field indexed. */
    IEMOP_MNEMONIC2("add\0or\0\0adc\0sbb\0and\0sub\0xor\0cmp" + ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)*4, "Ev,Ib");
    /* Note! Seems the OR, AND, and XOR instructions are present on CPUs prior
       to the 386 even if absent in the intel reference manuals and some
       3rd party opcode listings. */
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register target
         */
        IEMOP_HLP_NO_LOCK_PREFIX(); /* LOCK invalid with register destination */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (int8_t)u8Imm,1); /* sign-extend imm8 */
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (int8_t)u8Imm,1); /* sign-extend imm8 */
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                /* 32-bit register writes zero the upper half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int8_t)u8Imm,1); /* sign-extend imm8 */
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }
        }
    }
    else
    {
        /*
         * Memory target.
         */
        uint32_t fAccess;
        if (pImpl->pfnLockedU16)
            fAccess = IEM_ACCESS_DATA_RW;
        else
        { /* CMP - read-only destination, LOCK prefix invalid */
            IEMOP_HLP_NO_LOCK_PREFIX();
            fAccess = IEM_ACCESS_DATA_R;
        }

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* cbImm=1: one immediate byte follows the ModR/M bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm); /* sign-extend imm8 */
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* cbImm=1: one immediate byte follows the ModR/M bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm); /* sign-extend imm8 */
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* cbImm=1: one immediate byte follows the ModR/M bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm); /* sign-extend imm8 */
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }
        }
    }
    return VINF_SUCCESS;
}
9795
9796
/** Opcode 0x84. */
FNIEMOP_DEF(iemOp_test_Eb_Gb)
{
    /* TEST Eb,Gb: AND without storing the result; shares the generic
       byte r/m,reg binary-operator decoder with the TEST implementation.
       AF is architecturally undefined after TEST. */
    IEMOP_MNEMONIC("test Eb,Gb");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo do we have to decode the whole instruction first? */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_test);
}
9805
9806
/** Opcode 0x85. */
FNIEMOP_DEF(iemOp_test_Ev_Gv)
{
    /* TEST Ev,Gv: word/dword/qword variant of TEST; delegates to the generic
       r/m,reg binary-operator decoder.  AF is architecturally undefined. */
    IEMOP_MNEMONIC("test Ev,Gv");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo do we have to decode the whole instruction first? */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_test);
}
9815
9816
/** Opcode 0x86. */
FNIEMOP_DEF(iemOp_xchg_Eb_Gb)
{
    /*
     * XCHG Eb,Gb - exchange a byte register with a byte r/m.  The register
     * form is two fetches and two stores; the memory form maps the location
     * read/write and calls the assembly xchg worker.
     */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC("xchg Eb,Gb");

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX(); /* LOCK invalid for the register form */

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, uTmp1);
        IEM_MC_LOCAL(uint8_t, uTmp2);

        IEM_MC_FETCH_GREG_U8(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_FETCH_GREG_U8(uTmp2, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, uTmp1);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uTmp2);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
/** @todo the register must be committed separately! */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint8_t *, pu8Mem, 0);
        IEM_MC_ARG(uint8_t *, pu8Reg, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Mem, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8, pu8Mem, pu8Reg);
        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Mem, IEM_ACCESS_DATA_RW);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
9864
9865
/** Opcode 0x87. */
FNIEMOP_DEF(iemOp_xchg_Ev_Gv)
{
    /*
     * XCHG Ev,Gv - exchange a word/dword/qword register with an r/m of the
     * same size.  Register form: two fetches, two stores.  Memory form:
     * map read/write and call the assembly xchg worker for the size.
     */
    IEMOP_MNEMONIC("xchg Ev,Gv");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX(); /* LOCK invalid for the register form */

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, uTmp1);
                IEM_MC_LOCAL(uint16_t, uTmp2);

                IEM_MC_FETCH_GREG_U16(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U16(uTmp2, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, uTmp1);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                /* Note: the U32 register stores zero the upper halves implicitly. */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, uTmp1);
                IEM_MC_LOCAL(uint32_t, uTmp2);

                IEM_MC_FETCH_GREG_U32(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U32(uTmp2, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, uTmp1);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, uTmp1);
                IEM_MC_LOCAL(uint64_t, uTmp2);

                IEM_MC_FETCH_GREG_U64(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U64(uTmp2, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, uTmp1);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
        switch (pIemCpu->enmEffOpSize)
        {
/** @todo the register must be committed separately! */
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint16_t *, pu16Mem, 0);
                IEM_MC_ARG(uint16_t *, pu16Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu16Mem, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16, pu16Mem, pu16Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Mem, IEM_ACCESS_DATA_RW);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint32_t *, pu32Mem, 0);
                IEM_MC_ARG(uint32_t *, pu32Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu32Mem, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32, pu32Mem, pu32Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Mem, IEM_ACCESS_DATA_RW);

                /* The register was written via reference, so zero its upper half here. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint64_t *, pu64Mem, 0);
                IEM_MC_ARG(uint64_t *, pu64Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu64Mem, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64, pu64Mem, pu64Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Mem, IEM_ACCESS_DATA_RW);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9987
9988
/** Opcode 0x88. */
FNIEMOP_DEF(iemOp_mov_Eb_Gb)
{
    /*
     * MOV Eb,Gb - store a byte register into a byte r/m (register or memory).
     */
    IEMOP_MNEMONIC("mov Eb,Gb");

    uint8_t bRm;
    IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register destination: reg -> r/m register */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); /* no immediate follows */
        IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_STORE_MEM_U8(pIemCpu->iEffSeg, GCPtrEffDst, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;

}
10027
10028
/** Opcode 0x89. */
FNIEMOP_DEF(iemOp_mov_Ev_Gv)
{
    /*
     * MOV Ev,Gv - store a word/dword/qword register into an r/m of the same
     * size (register or memory), switching on the effective operand size.
     */
    IEMOP_MNEMONIC("mov Ev,Gv");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                /* Note: the U32 register store zeroes the upper half implicitly. */
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); /* no immediate follows */
                IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrEffDst, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffDst, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
10115
10116
/** Opcode 0x8a. MOV Gb,Eb - load a byte register from register/memory.
 *  REX.R extends the destination, REX.B the register-form source. */
FNIEMOP_DEF(iemOp_mov_Gb_Eb)
{
    IEMOP_MNEMONIC("mov Gb,Eb");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register source: plain register-to-register byte copy. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_MEM_U8(u8Value, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
10153
10154
/** Opcode 0x8b. MOV Gv,Ev - load a general purpose register from
 *  register/memory, 16/32/64-bit operand size. */
FNIEMOP_DEF(iemOp_mov_Gv_Ev)
{
    IEMOP_MNEMONIC("mov Gv,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register source: plain register-to-register copy, one case per
           effective operand size. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
10241
10242
10243/** Opcode 0x63. */
10244FNIEMOP_DEF(iemOp_arpl_Ew_Gw_movsx_Gv_Ev)
10245{
10246 if (pIemCpu->enmCpuMode != IEMMODE_64BIT)
10247 return FNIEMOP_CALL(iemOp_arpl_Ew_Gw);
10248 if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
10249 return FNIEMOP_CALL(iemOp_mov_Gv_Ev);
10250 return FNIEMOP_CALL(iemOp_movsxd_Gv_Ev);
10251}
10252
10253
/** Opcode 0x8c. MOV Ev,Sw - store a segment selector to register/memory.
 *  The register form honours the operand size (zero extending for 32/64-bit);
 *  the memory form is always a 16-bit store. */
FNIEMOP_DEF(iemOp_mov_Ev_Sw)
{
    IEMOP_MNEMONIC("mov Ev,Sw");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * Check that the destination register exists. The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
    if ( iSegReg > X86_SREG_GS)
        return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     * In that case, the operand size is respected and the upper bits are
     * cleared (starting with some pentium).
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_SREG_ZX_U32(u32Value, iSegReg);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_SREG_ZX_U64(u64Value, iSegReg);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're saving the register to memory.  The access is word sized
         * regardless of operand size prefixes.
         */
#if 0 /* not necessary */
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_16BIT;
#endif
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
        IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
10326
10327
10328
10329
/** Opcode 0x8d. LEA Gv,M - store the effective address of the memory operand
 *  in a general purpose register, truncated to the effective operand size.
 *  The register form (mod=3) is invalid and raises \#UD. */
FNIEMOP_DEF(iemOp_lea_Gv_M)
{
    IEMOP_MNEMONIC("lea Gv,M");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE(); /* no register form */

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR,  GCPtrEffSrc);
            IEM_MC_LOCAL(uint16_t, u16Cast);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            /* Truncate the address to 16 bits before storing. */
            IEM_MC_ASSIGN_TO_SMALLER(u16Cast, GCPtrEffSrc);
            IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Cast);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Cast);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            /* Truncate the address to 32 bits before storing. */
            IEM_MC_ASSIGN_TO_SMALLER(u32Cast, GCPtrEffSrc);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Cast);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, GCPtrEffSrc);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
    }
    AssertFailedReturn(VERR_IEM_IPE_7);
}
10374
10375
/** Opcode 0x8e. MOV Sw,Ev - load a segment register from register/memory.
 *  Always a 16-bit access; loading CS or an out-of-range selector register
 *  raises \#UD.  The actual load is deferred to iemCImpl_load_SReg. */
FNIEMOP_DEF(iemOp_mov_Sw_Ev)
{
    IEMOP_MNEMONIC("mov Sw,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * The practical operand size is 16-bit.
     */
#if 0 /* not necessary */
    pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_16BIT;
#endif

    /*
     * Check that the destination register exists and can be used with this
     * instruction.  The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
    if (   iSegReg == X86_SREG_CS
        || iSegReg > X86_SREG_GS)
        return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
        IEM_MC_ARG(uint16_t,      u16Value,          1);
        IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading the register from memory.  The access is word sized
         * regardless of operand size prefixes.
         */
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
        IEM_MC_ARG(uint16_t,      u16Value,          1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_MEM_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
10429
10430
/** Opcode 0x8f /0. POP Ev - pop the stack top into register/memory.
 *  The memory form must honour Intel's rule that RSP is incremented before
 *  it is used in the effective address calculation, hence the double-decode
 *  dance below. */
FNIEMOP_DEF_1(iemOp_pop_Ev, uint8_t, bRm)
{
    /* This bugger is rather annoying as it requires rSP to be updated before
       doing the effective address calculations.  Will eventually require a
       split between the R/M+SIB decoding and the effective address
       calculation - which is something that is required for any attempt at
       reusing this code for a recompiler.  It may also be good to have if we
       need to delay #UD exception caused by invalid lock prefixes.

       For now, we'll do a mostly safe interpreter-only implementation here. */
    /** @todo What's the deal with the 'reg' field and pop Ev?  Ignorning it for
     *        now until tests show it's checked.. */
    IEMOP_MNEMONIC("pop Ev");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /* Register access is relatively easy and can share code. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(iemOpCommonPopGReg, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);

    /*
     * Memory target.
     *
     * Intel says that RSP is incremented before it's used in any effective
     * address calcuations.  This means some serious extra annoyance here since
     * we decode and calculate the effective address in one step and like to
     * delay committing registers till everything is done.
     *
     * So, we'll decode and calculate the effective address twice.  This will
     * require some recoding if turned into a recompiler.
     */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* The common code does this differently. */

#ifndef TST_IEM_CHECK_MC
    /* Calc effective address with modified ESP. */
    /* First pass just advances the opcode cursor; rewind it afterwards so the
       second pass below decodes the same displacement bytes. */
    uint8_t const   offOpcodeSaved = pIemCpu->offOpcode;
    RTGCPTR         GCPtrEff;
    VBOXSTRICTRC    rcStrict;
    rcStrict = iemOpHlpCalcRmEffAddr(pIemCpu, bRm, 0, &GCPtrEff);
    if (rcStrict != VINF_SUCCESS)
        return rcStrict;
    pIemCpu->offOpcode = offOpcodeSaved;

    /* Temporarily bump RSP by the operand size, recalculate the effective
       address with the adjusted RSP, then restore RSP. */
    PCPUMCTX        pCtx = pIemCpu->CTX_SUFF(pCtx);
    uint64_t const  RspSaved = pCtx->rsp;
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT: iemRegAddToRsp(pIemCpu, pCtx, 2); break;
        case IEMMODE_32BIT: iemRegAddToRsp(pIemCpu, pCtx, 4); break;
        case IEMMODE_64BIT: iemRegAddToRsp(pIemCpu, pCtx, 8); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    rcStrict = iemOpHlpCalcRmEffAddr(pIemCpu, bRm, 0, &GCPtrEff);
    Assert(rcStrict == VINF_SUCCESS);
    pCtx->rsp = RspSaved;

    /* Perform the operation - this should be CImpl. */
    /* Pop via a shadow RSP (TmpRsp) so nothing is committed if the memory
       store faults. */
    RTUINT64U TmpRsp;
    TmpRsp.u = pCtx->rsp;
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Value;
            rcStrict = iemMemStackPopU16Ex(pIemCpu, &u16Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU16(pIemCpu, pIemCpu->iEffSeg, GCPtrEff, u16Value);
            break;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Value;
            rcStrict = iemMemStackPopU32Ex(pIemCpu, &u32Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU32(pIemCpu, pIemCpu->iEffSeg, GCPtrEff, u32Value);
            break;
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Value;
            rcStrict = iemMemStackPopU64Ex(pIemCpu, &u64Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU64(pIemCpu, pIemCpu->iEffSeg, GCPtrEff, u64Value);
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    /* Only commit the new RSP and advance RIP when everything succeeded. */
    if (rcStrict == VINF_SUCCESS)
    {
        pCtx->rsp = TmpRsp.u;
        iemRegUpdateRipAndClearRF(pIemCpu);
    }
    return rcStrict;

#else
    return VERR_IEM_IPE_2;
#endif
}
10532
10533
10534/** Opcode 0x8f. */
10535FNIEMOP_DEF(iemOp_Grp1A)
10536{
10537 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10538 if ((bRm & X86_MODRM_REG_MASK) == (0 << X86_MODRM_REG_SHIFT)) /* /0 */
10539 return FNIEMOP_CALL_1(iemOp_pop_Ev, bRm);
10540
10541 /* AMD has defined /1 thru /7 as XOP prefix (similar to three byte VEX). */
10542 /** @todo XOP decoding. */
10543 IEMOP_MNEMONIC("3-byte-xop");
10544 return IEMOP_RAISE_INVALID_OPCODE();
10545}
10546
10547
/**
 * Common 'xchg reg,rAX' helper.
 *
 * Swaps rAX with the general register given by @a iReg (after applying
 * REX.B) at the current effective operand size, using two temporaries.
 *
 * @param   iReg    The low 3 bits of the register index (opcode & 7);
 *                  REX.B is OR'ed in here.
 */
FNIEMOP_DEF_1(iemOpCommonXchgGRegRax, uint8_t, iReg)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    iReg |= pIemCpu->uRexB;
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint16_t, u16Tmp1);
            IEM_MC_LOCAL(uint16_t, u16Tmp2);
            IEM_MC_FETCH_GREG_U16(u16Tmp1, iReg);
            IEM_MC_FETCH_GREG_U16(u16Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp1);
            IEM_MC_STORE_GREG_U16(iReg, u16Tmp2);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Tmp1);
            IEM_MC_LOCAL(uint32_t, u32Tmp2);
            IEM_MC_FETCH_GREG_U32(u32Tmp1, iReg);
            IEM_MC_FETCH_GREG_U32(u32Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp1);
            IEM_MC_STORE_GREG_U32(iReg, u32Tmp2);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Tmp1);
            IEM_MC_LOCAL(uint64_t, u64Tmp2);
            IEM_MC_FETCH_GREG_U64(u64Tmp1, iReg);
            IEM_MC_FETCH_GREG_U64(u64Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp1);
            IEM_MC_STORE_GREG_U64(iReg, u64Tmp2);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10597
10598
/** Opcode 0x90. NOP - also XCHG r8,rAX when REX.B is present, and PAUSE
 *  (here decoded but treated as NOP) when carrying the F3 prefix. */
FNIEMOP_DEF(iemOp_nop)
{
    /* R8/R8D and RAX/EAX can be exchanged. */
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REX_B)
    {
        IEMOP_MNEMONIC("xchg r8,rAX");
        return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xAX);
    }

    /* F3 0x90 is PAUSE; we only record the mnemonic and execute it as a NOP. */
    if (pIemCpu->fPrefixes & IEM_OP_PRF_LOCK)
        IEMOP_MNEMONIC("pause");
    else
        IEMOP_MNEMONIC("nop");
    IEM_MC_BEGIN(0, 0);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10618
10619
/** Opcode 0x91. XCHG rCX,rAX - defers to the common helper. */
FNIEMOP_DEF(iemOp_xchg_eCX_eAX)
{
    IEMOP_MNEMONIC("xchg rCX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xCX);
}
10626
10627
/** Opcode 0x92. XCHG rDX,rAX - defers to the common helper. */
FNIEMOP_DEF(iemOp_xchg_eDX_eAX)
{
    IEMOP_MNEMONIC("xchg rDX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDX);
}
10634
10635
/** Opcode 0x93. XCHG rBX,rAX - defers to the common helper. */
FNIEMOP_DEF(iemOp_xchg_eBX_eAX)
{
    IEMOP_MNEMONIC("xchg rBX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBX);
}
10642
10643
10644/** Opcode 0x94. */
10645FNIEMOP_DEF(iemOp_xchg_eSP_eAX)
10646{
10647 IEMOP_MNEMONIC("xchg rSX,rAX");
10648 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSP);
10649}
10650
10651
/** Opcode 0x95. XCHG rBP,rAX - defers to the common helper. */
FNIEMOP_DEF(iemOp_xchg_eBP_eAX)
{
    IEMOP_MNEMONIC("xchg rBP,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBP);
}
10658
10659
/** Opcode 0x96. XCHG rSI,rAX - defers to the common helper. */
FNIEMOP_DEF(iemOp_xchg_eSI_eAX)
{
    IEMOP_MNEMONIC("xchg rSI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSI);
}
10666
10667
/** Opcode 0x97. XCHG rDI,rAX - defers to the common helper. */
FNIEMOP_DEF(iemOp_xchg_eDI_eAX)
{
    IEMOP_MNEMONIC("xchg rDI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDI);
}
10674
10675
/** Opcode 0x98. CBW/CWDE/CDQE - sign extend AL->AX, AX->EAX or EAX->RAX
 *  depending on the effective operand size, implemented by testing the
 *  source's sign bit and OR-ing/AND-ing the upper half of rAX. */
FNIEMOP_DEF(iemOp_cbw)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEMOP_MNEMONIC("cbw");
            IEM_MC_BEGIN(0, 1);
            /* Sign bit of AL decides the value of AH. */
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 7) {
                IEM_MC_OR_GREG_U16(X86_GREG_xAX, UINT16_C(0xff00));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U16(X86_GREG_xAX, UINT16_C(0x00ff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEMOP_MNEMONIC("cwde");
            IEM_MC_BEGIN(0, 1);
            /* Sign bit of AX decides the upper 16 bits of EAX. */
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
                IEM_MC_OR_GREG_U32(X86_GREG_xAX, UINT32_C(0xffff0000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U32(X86_GREG_xAX, UINT32_C(0x0000ffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEMOP_MNEMONIC("cdqe");
            IEM_MC_BEGIN(0, 1);
            /* Sign bit of EAX decides the upper 32 bits of RAX. */
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
                IEM_MC_OR_GREG_U64(X86_GREG_xAX, UINT64_C(0xffffffff00000000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U64(X86_GREG_xAX, UINT64_C(0x00000000ffffffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10721
10722
/** Opcode 0x99. CWD/CDQ/CQO - sign extend rAX into rDX: rDX is set to all
 *  ones or all zeros depending on the sign bit of AX/EAX/RAX. */
FNIEMOP_DEF(iemOp_cwd)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEMOP_MNEMONIC("cwd");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, UINT16_C(0xffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEMOP_MNEMONIC("cdq");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, UINT32_C(0xffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEMOP_MNEMONIC("cqo");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 63) {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, UINT64_C(0xffffffffffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10768
10769
/** Opcode 0x9a. CALL Ap - direct far call with an immediate sel:offset
 *  pointer.  Invalid in 64-bit mode; the heavy lifting is deferred to
 *  iemCImpl_callf. */
FNIEMOP_DEF(iemOp_call_Ap)
{
    IEMOP_MNEMONIC("call Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    uint32_t offSeg;
    if (pIemCpu->enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&offSeg);
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
    uint16_t uSel;  IEM_OPCODE_GET_NEXT_U16(&uSel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_callf, uSel, offSeg, pIemCpu->enmEffOpSize);
}
10786
10787
/** Opcode 0x9b. (aka fwait)
 *  WAIT - checks for pending device-not-available and FPU exceptions, then
 *  simply advances RIP. */
FNIEMOP_DEF(iemOp_wait)
{
    IEMOP_MNEMONIC("wait");
    IEMOP_HLP_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10801
10802
/** Opcode 0x9c. PUSHF/PUSHFD/PUSHFQ - deferred to iemCImpl_pushf with the
 *  effective operand size (64-bit is the default in long mode). */
FNIEMOP_DEF(iemOp_pushf_Fv)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_pushf, pIemCpu->enmEffOpSize);
}
10810
10811
/** Opcode 0x9d. POPF/POPFD/POPFQ - deferred to iemCImpl_popf with the
 *  effective operand size (64-bit is the default in long mode). */
FNIEMOP_DEF(iemOp_popf_Fv)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_popf, pIemCpu->enmEffOpSize);
}
10819
10820
/** Opcode 0x9e. SAHF - store AH into the low byte of EFLAGS (SF, ZF, AF, PF,
 *  CF; bit 1 forced to one).  In 64-bit mode this requires the LAHF/SAHF
 *  CPUID feature, otherwise \#UD. */
FNIEMOP_DEF(iemOp_sahf)
{
    IEMOP_MNEMONIC("sahf");
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (   pIemCpu->enmCpuMode == IEMMODE_64BIT
        && !IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fLahfSahf)
        return IEMOP_RAISE_INVALID_OPCODE();
    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint32_t, u32Flags);
    IEM_MC_LOCAL(uint32_t, EFlags);
    IEM_MC_FETCH_EFLAGS(EFlags);
    /* AH is general register 4 in the 8-bit register numbering. */
    IEM_MC_FETCH_GREG_U8_ZX_U32(u32Flags, X86_GREG_xSP/*=AH*/);
    /* Mask AH down to the flags SAHF may set, keep the upper EFLAGS bits,
       force the reserved bit 1 to one, and merge. */
    IEM_MC_AND_LOCAL_U32(u32Flags, X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    IEM_MC_AND_LOCAL_U32(EFlags, UINT32_C(0xffffff00));
    IEM_MC_OR_LOCAL_U32(u32Flags, X86_EFL_1);
    IEM_MC_OR_2LOCS_U32(EFlags, u32Flags);
    IEM_MC_COMMIT_EFLAGS(EFlags);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10843
10844
/** Opcode 0x9f. LAHF - load the low byte of EFLAGS into AH.  In 64-bit mode
 *  this requires the LAHF/SAHF CPUID feature, otherwise \#UD. */
FNIEMOP_DEF(iemOp_lahf)
{
    IEMOP_MNEMONIC("lahf");
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (   pIemCpu->enmCpuMode == IEMMODE_64BIT
        && !IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fLahfSahf)
        return IEMOP_RAISE_INVALID_OPCODE();
    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(uint8_t, u8Flags);
    IEM_MC_FETCH_EFLAGS_U8(u8Flags);
    /* AH is general register 4 in the 8-bit register numbering. */
    IEM_MC_STORE_GREG_U8(X86_GREG_xSP/*=AH*/, u8Flags);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10861
10862
/**
 * Macro used by iemOp_mov_Al_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL and
 * iemOp_mov_Ov_rAX to fetch the moffsXX bit of the opcode and fend of lock
 * prefixes.  Will return on failures.
 *
 * The immediate offset width follows the effective address size (16, 32 or
 * 64-bit) and is zero extended to 64 bits; lock prefixes are rejected via
 * IEMOP_HLP_NO_LOCK_PREFIX after the offset bytes have been consumed.
 *
 * @param a_GCPtrMemOff The variable to store the offset in.
 */
#define IEMOP_FETCH_MOFFS_XX(a_GCPtrMemOff) \
    do \
    { \
        switch (pIemCpu->enmEffAddrMode) \
        { \
            case IEMMODE_16BIT: \
                IEM_OPCODE_GET_NEXT_U16_ZX_U64(&(a_GCPtrMemOff)); \
                break; \
            case IEMMODE_32BIT: \
                IEM_OPCODE_GET_NEXT_U32_ZX_U64(&(a_GCPtrMemOff)); \
                break; \
            case IEMMODE_64BIT: \
                IEM_OPCODE_GET_NEXT_U64(&(a_GCPtrMemOff)); \
                break; \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
        IEMOP_HLP_NO_LOCK_PREFIX(); \
    } while (0)
10887
10888/** Opcode 0xa0. */
10889FNIEMOP_DEF(iemOp_mov_Al_Ob)
10890{
10891 /*
10892 * Get the offset and fend of lock prefixes.
10893 */
10894 RTGCPTR GCPtrMemOff;
10895 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
10896
10897 /*
10898 * Fetch AL.
10899 */
10900 IEM_MC_BEGIN(0,1);
10901 IEM_MC_LOCAL(uint8_t, u8Tmp);
10902 IEM_MC_FETCH_MEM_U8(u8Tmp, pIemCpu->iEffSeg, GCPtrMemOff);
10903 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
10904 IEM_MC_ADVANCE_RIP();
10905 IEM_MC_END();
10906 return VINF_SUCCESS;
10907}
10908
10909
/** Opcode 0xa1. MOV rAX,Ov - load rAX from a direct memory offset at the
 *  effective operand size. */
FNIEMOP_DEF(iemOp_mov_rAX_Ov)
{
    /*
     * Get the offset and fend of lock prefixes.
     */
    IEMOP_MNEMONIC("mov rAX,Ov");
    RTGCPTR GCPtrMemOff;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);

    /*
     * Fetch rAX.
     */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint16_t, u16Tmp);
            IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10955
10956
10957/** Opcode 0xa2. */
10958FNIEMOP_DEF(iemOp_mov_Ob_AL)
10959{
10960 /*
10961 * Get the offset and fend of lock prefixes.
10962 */
10963 RTGCPTR GCPtrMemOff;
10964 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
10965
10966 /*
10967 * Store AL.
10968 */
10969 IEM_MC_BEGIN(0,1);
10970 IEM_MC_LOCAL(uint8_t, u8Tmp);
10971 IEM_MC_FETCH_GREG_U8(u8Tmp, X86_GREG_xAX);
10972 IEM_MC_STORE_MEM_U8(pIemCpu->iEffSeg, GCPtrMemOff, u8Tmp);
10973 IEM_MC_ADVANCE_RIP();
10974 IEM_MC_END();
10975 return VINF_SUCCESS;
10976}
10977
10978
10979/** Opcode 0xa3. */
10980FNIEMOP_DEF(iemOp_mov_Ov_rAX)
10981{
10982 /*
10983 * Get the offset and fend of lock prefixes.
10984 */
10985 RTGCPTR GCPtrMemOff;
10986 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
10987
10988 /*
10989 * Store rAX.
10990 */
10991 switch (pIemCpu->enmEffOpSize)
10992 {
10993 case IEMMODE_16BIT:
10994 IEM_MC_BEGIN(0,1);
10995 IEM_MC_LOCAL(uint16_t, u16Tmp);
10996 IEM_MC_FETCH_GREG_U16(u16Tmp, X86_GREG_xAX);
10997 IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrMemOff, u16Tmp);
10998 IEM_MC_ADVANCE_RIP();
10999 IEM_MC_END();
11000 return VINF_SUCCESS;
11001
11002 case IEMMODE_32BIT:
11003 IEM_MC_BEGIN(0,1);
11004 IEM_MC_LOCAL(uint32_t, u32Tmp);
11005 IEM_MC_FETCH_GREG_U32(u32Tmp, X86_GREG_xAX);
11006 IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrMemOff, u32Tmp);
11007 IEM_MC_ADVANCE_RIP();
11008 IEM_MC_END();
11009 return VINF_SUCCESS;
11010
11011 case IEMMODE_64BIT:
11012 IEM_MC_BEGIN(0,1);
11013 IEM_MC_LOCAL(uint64_t, u64Tmp);
11014 IEM_MC_FETCH_GREG_U64(u64Tmp, X86_GREG_xAX);
11015 IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrMemOff, u64Tmp);
11016 IEM_MC_ADVANCE_RIP();
11017 IEM_MC_END();
11018 return VINF_SUCCESS;
11019
11020 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11021 }
11022}
11023
/** Macro used by iemOp_movsb_Xb_Yb and iemOp_movswd_Xv_Yv.
 * Emits the non-repeating MOVS body: load from iEffSeg:[xSI], store to
 * ES:[xDI], then advance or rewind both index registers by the element size
 * (ValBits / 8) depending on EFLAGS.DF.  Index registers are zero extended
 * to 64 bits at the given address size. */
#define IEM_MOVS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR,           uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, pIemCpu->iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END();
11042
/** Opcode 0xa4. MOVSB - move byte from DS:[xSI] (segment overridable) to
 *  ES:[xDI].  REP/REPNE forms are deferred to per-address-size C workers;
 *  the plain form shares IEM_MOVS_CASE with MOVSW/D/Q. */
FNIEMOP_DEF(iemOp_movsb_Xb_Yb)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep movsb Xb,Yb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr16, pIemCpu->iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr32, pIemCpu->iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr64, pIemCpu->iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("movsb Xb,Yb");

    /*
     * Sharing case implementation with movs[wdq] below.
     */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_MOVS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_MOVS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_MOVS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
11076
11077
11078/** Opcode 0xa5. */
11079FNIEMOP_DEF(iemOp_movswd_Xv_Yv)
11080{
11081 IEMOP_HLP_NO_LOCK_PREFIX();
11082
11083 /*
11084 * Use the C implementation if a repeat prefix is encountered.
11085 */
11086 if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
11087 {
11088 IEMOP_MNEMONIC("rep movs Xv,Yv");
11089 switch (pIemCpu->enmEffOpSize)
11090 {
11091 case IEMMODE_16BIT:
11092 switch (pIemCpu->enmEffAddrMode)
11093 {
11094 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr16, pIemCpu->iEffSeg);
11095 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr32, pIemCpu->iEffSeg);
11096 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr64, pIemCpu->iEffSeg);
11097 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11098 }
11099 break;
11100 case IEMMODE_32BIT:
11101 switch (pIemCpu->enmEffAddrMode)
11102 {
11103 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr16, pIemCpu->iEffSeg);
11104 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr32, pIemCpu->iEffSeg);
11105 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr64, pIemCpu->iEffSeg);
11106 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11107 }
11108 case IEMMODE_64BIT:
11109 switch (pIemCpu->enmEffAddrMode)
11110 {
11111 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6);
11112 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr32, pIemCpu->iEffSeg);
11113 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr64, pIemCpu->iEffSeg);
11114 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11115 }
11116 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11117 }
11118 }
11119 IEMOP_MNEMONIC("movs Xv,Yv");
11120
11121 /*
11122 * Annoying double switch here.
11123 * Using ugly macro for implementing the cases, sharing it with movsb.
11124 */
11125 switch (pIemCpu->enmEffOpSize)
11126 {
11127 case IEMMODE_16BIT:
11128 switch (pIemCpu->enmEffAddrMode)
11129 {
11130 case IEMMODE_16BIT: IEM_MOVS_CASE(16, 16); break;
11131 case IEMMODE_32BIT: IEM_MOVS_CASE(16, 32); break;
11132 case IEMMODE_64BIT: IEM_MOVS_CASE(16, 64); break;
11133 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11134 }
11135 break;
11136
11137 case IEMMODE_32BIT:
11138 switch (pIemCpu->enmEffAddrMode)
11139 {
11140 case IEMMODE_16BIT: IEM_MOVS_CASE(32, 16); break;
11141 case IEMMODE_32BIT: IEM_MOVS_CASE(32, 32); break;
11142 case IEMMODE_64BIT: IEM_MOVS_CASE(32, 64); break;
11143 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11144 }
11145 break;
11146
11147 case IEMMODE_64BIT:
11148 switch (pIemCpu->enmEffAddrMode)
11149 {
11150 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
11151 case IEMMODE_32BIT: IEM_MOVS_CASE(64, 32); break;
11152 case IEMMODE_64BIT: IEM_MOVS_CASE(64, 64); break;
11153 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11154 }
11155 break;
11156 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11157 }
11158 return VINF_SUCCESS;
11159}
11160
11161#undef IEM_MOVS_CASE
11162
/** Macro used by iemOp_cmpsb_Xb_Yb and iemOp_cmpswd_Xv_Yv for the
 * non-repeating CMPS forms.
 *
 * Emits micro-code that loads ValBits bits from [iEffSeg:xSI] and [ES:xDI],
 * compares them via iemAImpl_cmp_u<ValBits> (updating EFLAGS only; the
 * first operand is passed by reference but CMP does not write it), and then
 * advances both index registers by ValBits/8 bytes (decrements them when
 * EFLAGS.DF is set).  AddrBits selects the effective address width. */
#define IEM_CMPS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(3, 3); \
        IEM_MC_ARG(uint##ValBits##_t *, puValue1, 0); \
        IEM_MC_ARG(uint##ValBits##_t,   uValue2,  1); \
        IEM_MC_ARG(uint32_t *,          pEFlags,  2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue1); \
        IEM_MC_LOCAL(RTGCPTR,           uAddr); \
        \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue1, pIemCpu->iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue2, X86_SREG_ES, uAddr); \
        IEM_MC_REF_LOCAL(puValue1, uValue1); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puValue1, uValue2, pEFlags); \
        \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END(); \

11190/** Opcode 0xa6. */
11191FNIEMOP_DEF(iemOp_cmpsb_Xb_Yb)
11192{
11193 IEMOP_HLP_NO_LOCK_PREFIX();
11194
11195 /*
11196 * Use the C implementation if a repeat prefix is encountered.
11197 */
11198 if (pIemCpu->fPrefixes & IEM_OP_PRF_REPZ)
11199 {
11200 IEMOP_MNEMONIC("repe cmps Xb,Yb");
11201 switch (pIemCpu->enmEffAddrMode)
11202 {
11203 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr16, pIemCpu->iEffSeg);
11204 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr32, pIemCpu->iEffSeg);
11205 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr64, pIemCpu->iEffSeg);
11206 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11207 }
11208 }
11209 if (pIemCpu->fPrefixes & IEM_OP_PRF_REPNZ)
11210 {
11211 IEMOP_MNEMONIC("repe cmps Xb,Yb");
11212 switch (pIemCpu->enmEffAddrMode)
11213 {
11214 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr16, pIemCpu->iEffSeg);
11215 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr32, pIemCpu->iEffSeg);
11216 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr64, pIemCpu->iEffSeg);
11217 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11218 }
11219 }
11220 IEMOP_MNEMONIC("cmps Xb,Yb");
11221
11222 /*
11223 * Sharing case implementation with cmps[wdq] below.
11224 */
11225 switch (pIemCpu->enmEffAddrMode)
11226 {
11227 case IEMMODE_16BIT: IEM_CMPS_CASE(8, 16); break;
11228 case IEMMODE_32BIT: IEM_CMPS_CASE(8, 32); break;
11229 case IEMMODE_64BIT: IEM_CMPS_CASE(8, 64); break;
11230 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11231 }
11232 return VINF_SUCCESS;
11233
11234}
11235
11236
/**
 * Opcode 0xa7 - CMPS Xv,Yv (cmpsw/cmpsd/cmpsq).
 *
 * Word/dword/qword string compare of [iEffSeg:xSI] against [ES:xDI].  The
 * REPE and REPNE prefixed forms are deferred to the C implementations; the
 * plain forms share their micro-code with cmpsb via IEM_CMPS_CASE.
 */
FNIEMOP_DEF(iemOp_cmpswd_Xv_Yv)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC("repe cmps Xv,Yv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no break: falls thru to the 64-bit case, but unreachable - every inner case returns. */
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_4); /* 16-bit addressing cannot be encoded in 64-bit mode. */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    if (pIemCpu->fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC("repne cmps Xv,Yv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no break: falls thru to the 64-bit case, but unreachable - every inner case returns. */
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_2); /* 16-bit addressing cannot be encoded in 64-bit mode. */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    IEMOP_MNEMONIC("cmps Xv,Yv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with cmpsb.
     */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_CMPS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_CMPS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;

}
11355
11356#undef IEM_CMPS_CASE
11357
/** Opcode 0xa8 - TEST AL,Ib.  AND without writeback; only EFLAGS change
 * (AF is architecturally undefined afterwards, hence the verification hint). */
FNIEMOP_DEF(iemOp_test_AL_Ib)
{
    IEMOP_MNEMONIC("test al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_test);
}


/** Opcode 0xa9 - TEST rAX,Iz (operand-size dependent AX/EAX/RAX form). */
FNIEMOP_DEF(iemOp_test_eAX_Iz)
{
    IEMOP_MNEMONIC("test rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_test);
}
11374
11375
/** Macro used by iemOp_stosb_Yb_AL and iemOp_stoswd_Yv_eAX for the
 * non-repeating STOS forms.
 *
 * Emits micro-code that stores the low ValBits bits of xAX at [ES:xDI] and
 * then advances xDI by ValBits/8 bytes (decrements it when EFLAGS.DF is
 * set).  AddrBits selects the effective address width. */
#define IEM_STOS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR,           uAddr); \
        IEM_MC_FETCH_GREG_U##ValBits(uValue,   X86_GREG_xAX); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr,  X86_GREG_xDI); \
        IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END(); \

11391
/**
 * Opcode 0xaa - STOSB Yb,AL.
 *
 * Stores AL at [ES:xDI] and advances xDI.  REP/REPNE forms are deferred to
 * the C implementations (iemCImpl_stos_al_*; no iEffSeg argument since STOS
 * always uses ES and ignores segment overrides).
 */
FNIEMOP_DEF(iemOp_stosb_Yb_AL)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep stos Yb,al");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m16);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m32);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("stos Yb,al");

    /*
     * Sharing case implementation with stos[wdq] below.
     */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_STOS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_STOS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_STOS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
11425
11426
/**
 * Opcode 0xab - STOS Yv,eAX (stosw/stosd/stosq).
 *
 * Stores AX/EAX/RAX at [ES:xDI] and advances xDI by the operand size.
 * REP/REPNE forms are deferred to the C implementations (iemCImpl_stos_*).
 */
FNIEMOP_DEF(iemOp_stoswd_Yv_eAX)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep stos Yv,rAX");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no break: falls thru to the 64-bit case, but unreachable - every inner case returns. */
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_9); /* 16-bit addressing cannot be encoded in 64-bit mode. */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("stos Yv,rAX");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with stosb.
     */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_STOS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_STOS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_STOS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_STOS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_STOS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_STOS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_STOS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_STOS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
11509
11510#undef IEM_STOS_CASE
11511
/** Macro used by iemOp_lodsb_AL_Xb and iemOp_lodswd_eAX_Xv for the
 * non-repeating LODS forms.
 *
 * Emits micro-code that loads ValBits bits from [iEffSeg:xSI] into the low
 * part of xAX and then advances xSI by ValBits/8 bytes (decrements it when
 * EFLAGS.DF is set).  AddrBits selects the effective address width. */
#define IEM_LODS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR,           uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, pIemCpu->iEffSeg, uAddr); \
        IEM_MC_STORE_GREG_U##ValBits(X86_GREG_xAX, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END();
11527
/**
 * Opcode 0xac - LODSB AL,Xb.
 *
 * Loads AL from [iEffSeg:xSI] and advances xSI.  REP/REPNE forms are
 * deferred to the C implementations (iemCImpl_lods_al_*).
 */
FNIEMOP_DEF(iemOp_lodsb_AL_Xb)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep lodsb al,Xb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m16, pIemCpu->iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m32, pIemCpu->iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m64, pIemCpu->iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("lodsb al,Xb");

    /*
     * Sharing case implementation with lods[wdq] below.
     */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_LODS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_LODS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_LODS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
11561
11562
/**
 * Opcode 0xad - LODS eAX,Xv (lodsw/lodsd/lodsq).
 *
 * Loads AX/EAX/RAX from [iEffSeg:xSI] and advances xSI by the operand size.
 * REP/REPNE forms are deferred to the C implementations (iemCImpl_lods_*).
 */
FNIEMOP_DEF(iemOp_lodswd_eAX_Xv)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep lods rAX,Xv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no break: falls thru to the 64-bit case, but unreachable - every inner case returns. */
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_7); /* 16-bit addressing cannot be encoded in 64-bit mode. */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("lods rAX,Xv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with lodsb.
     */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_LODS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_LODS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_LODS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_LODS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_LODS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
11645
11646#undef IEM_LODS_CASE
11647
/** Macro used by iemOp_scasb_AL_Xb and iemOp_scaswd_eAX_Xv for the
 * non-repeating SCAS forms.
 *
 * Emits micro-code that compares the low ValBits bits of xAX against
 * [ES:xDI] via iemAImpl_cmp_u<ValBits> (EFLAGS only; xAX is passed by
 * reference but CMP does not write it) and then advances xDI by ValBits/8
 * bytes (decrements it when EFLAGS.DF is set). */
#define IEM_SCAS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(3, 2); \
        IEM_MC_ARG(uint##ValBits##_t *, puRax,   0); \
        IEM_MC_ARG(uint##ValBits##_t,   uValue,  1); \
        IEM_MC_ARG(uint32_t *,          pEFlags, 2); \
        IEM_MC_LOCAL(RTGCPTR,           uAddr); \
        \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, X86_SREG_ES, uAddr); \
        IEM_MC_REF_GREG_U##ValBits(puRax, X86_GREG_xAX); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puRax, uValue, pEFlags); \
        \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END();
11669
/**
 * Opcode 0xae - SCASB AL,Xb.
 *
 * Compares AL against [ES:xDI] and advances xDI.  The REPE and REPNE
 * prefixed forms are deferred to the C implementations
 * (iemCImpl_repe_scas_* / iemCImpl_repne_scas_*; SCAS always uses ES, so no
 * iEffSeg argument is needed).
 */
FNIEMOP_DEF(iemOp_scasb_AL_Xb)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC("repe scasb al,Xb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m16);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m32);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC("repne scasb al,Xb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m16);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m32);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("scasb al,Xb");

    /*
     * Sharing case implementation with scas[wdq] below.
     */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_SCAS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_SCAS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_SCAS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
11714
11715
/**
 * Opcode 0xaf - SCAS eAX,Xv (scasw/scasd/scasq).
 *
 * Compares AX/EAX/RAX against [ES:xDI] and advances xDI by the operand
 * size.  The REPE and REPNE prefixed forms are deferred to the C
 * implementations.
 */
FNIEMOP_DEF(iemOp_scaswd_eAX_Xv)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC("repe scas rAX,Xv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no break: falls thru to the 64-bit case, but unreachable - every inner case returns. */
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /** @todo NOTE(review): the 0x67 prefix gives 32-bit addressing in 64-bit mode; 16-bit addressing cannot be encoded there - confirm this assert is right. */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC("repne scas rAX,Xv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no break: falls thru to the 64-bit case, but unreachable - every inner case returns. */
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_5); /* 16-bit addressing cannot be encoded in 64-bit mode. */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("scas rAX,Xv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with scasb.
     */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_SCAS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_SCAS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_SCAS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_SCAS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_SCAS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_SCAS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_SCAS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_SCAS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
11831
11832#undef IEM_SCAS_CASE
11833
/**
 * Common 'mov r8, imm8' helper.
 *
 * Fetches the imm8 operand and stores it into the byte register indexed by
 * iReg (which high-byte vs. REX byte register iReg 4-7 resolves to is
 * decided by the IEM_MC_STORE_GREG_U8 accessor, not here).
 *
 * @param   iReg    The register index, including any REX.B extension.
 */
FNIEMOP_DEF_1(iemOpCommonMov_r8_Ib, uint8_t, iReg)
{
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL_CONST(uint8_t, u8Value,/*=*/ u8Imm);
    IEM_MC_STORE_GREG_U8(iReg, u8Value);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
11850
11851
/* Opcodes 0xb0..0xb7 - MOV r8,Ib.
 * The register index is (opcode & 7) | REX.B.  For indices 4-7 the mnemonics
 * below (AH/CH/DH/BH) apply to the no-REX encoding; with a REX prefix the
 * same indices select SPL/BPL/SIL/DIL - presumably resolved inside
 * IEM_MC_STORE_GREG_U8 (NOTE(review): confirm against the accessor). */

/** Opcode 0xb0. */
FNIEMOP_DEF(iemOp_mov_AL_Ib)
{
    IEMOP_MNEMONIC("mov AL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xAX | pIemCpu->uRexB);
}


/** Opcode 0xb1. */
FNIEMOP_DEF(iemOp_CL_Ib)
{
    IEMOP_MNEMONIC("mov CL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xCX | pIemCpu->uRexB);
}


/** Opcode 0xb2. */
FNIEMOP_DEF(iemOp_DL_Ib)
{
    IEMOP_MNEMONIC("mov DL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDX | pIemCpu->uRexB);
}


/** Opcode 0xb3. */
FNIEMOP_DEF(iemOp_BL_Ib)
{
    IEMOP_MNEMONIC("mov BL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBX | pIemCpu->uRexB);
}


/** Opcode 0xb4. */
FNIEMOP_DEF(iemOp_mov_AH_Ib)
{
    IEMOP_MNEMONIC("mov AH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP | pIemCpu->uRexB); /* index 4: AH, or SPL with REX */
}


/** Opcode 0xb5. */
FNIEMOP_DEF(iemOp_CH_Ib)
{
    IEMOP_MNEMONIC("mov CH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBP | pIemCpu->uRexB); /* index 5: CH, or BPL with REX */
}


/** Opcode 0xb6. */
FNIEMOP_DEF(iemOp_DH_Ib)
{
    IEMOP_MNEMONIC("mov DH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSI | pIemCpu->uRexB); /* index 6: DH, or SIL with REX */
}


/** Opcode 0xb7. */
FNIEMOP_DEF(iemOp_BH_Ib)
{
    IEMOP_MNEMONIC("mov BH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDI | pIemCpu->uRexB); /* index 7: BH, or DIL with REX */
}
11914
11915
11916/**
11917 * Common 'mov regX,immX' helper.
11918 */
11919FNIEMOP_DEF_1(iemOpCommonMov_Rv_Iv, uint8_t, iReg)
11920{
11921 switch (pIemCpu->enmEffOpSize)
11922 {
11923 case IEMMODE_16BIT:
11924 {
11925 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
11926 IEMOP_HLP_NO_LOCK_PREFIX();
11927
11928 IEM_MC_BEGIN(0, 1);
11929 IEM_MC_LOCAL_CONST(uint16_t, u16Value,/*=*/ u16Imm);
11930 IEM_MC_STORE_GREG_U16(iReg, u16Value);
11931 IEM_MC_ADVANCE_RIP();
11932 IEM_MC_END();
11933 break;
11934 }
11935
11936 case IEMMODE_32BIT:
11937 {
11938 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
11939 IEMOP_HLP_NO_LOCK_PREFIX();
11940
11941 IEM_MC_BEGIN(0, 1);
11942 IEM_MC_LOCAL_CONST(uint32_t, u32Value,/*=*/ u32Imm);
11943 IEM_MC_STORE_GREG_U32(iReg, u32Value);
11944 IEM_MC_ADVANCE_RIP();
11945 IEM_MC_END();
11946 break;
11947 }
11948 case IEMMODE_64BIT:
11949 {
11950 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm); /* 64-bit immediate! */
11951 IEMOP_HLP_NO_LOCK_PREFIX();
11952
11953 IEM_MC_BEGIN(0, 1);
11954 IEM_MC_LOCAL_CONST(uint64_t, u64Value,/*=*/ u64Imm);
11955 IEM_MC_STORE_GREG_U64(iReg, u64Value);
11956 IEM_MC_ADVANCE_RIP();
11957 IEM_MC_END();
11958 break;
11959 }
11960 }
11961
11962 return VINF_SUCCESS;
11963}
11964
11965
/* Opcodes 0xb8..0xbf - MOV rXX,Iv.  Register index is (opcode & 7) | REX.B.
 * (The mnemonic strings below spell the immediate "IV" rather than the usual
 * "Iv"; left as-is since they are emitted at runtime.) */

/** Opcode 0xb8. */
FNIEMOP_DEF(iemOp_eAX_Iv)
{
    IEMOP_MNEMONIC("mov rAX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xAX | pIemCpu->uRexB);
}


/** Opcode 0xb9. */
FNIEMOP_DEF(iemOp_eCX_Iv)
{
    IEMOP_MNEMONIC("mov rCX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xCX | pIemCpu->uRexB);
}


/** Opcode 0xba. */
FNIEMOP_DEF(iemOp_eDX_Iv)
{
    IEMOP_MNEMONIC("mov rDX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDX | pIemCpu->uRexB);
}


/** Opcode 0xbb. */
FNIEMOP_DEF(iemOp_eBX_Iv)
{
    IEMOP_MNEMONIC("mov rBX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBX | pIemCpu->uRexB);
}


/** Opcode 0xbc. */
FNIEMOP_DEF(iemOp_eSP_Iv)
{
    IEMOP_MNEMONIC("mov rSP,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSP | pIemCpu->uRexB);
}


/** Opcode 0xbd. */
FNIEMOP_DEF(iemOp_eBP_Iv)
{
    IEMOP_MNEMONIC("mov rBP,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBP | pIemCpu->uRexB);
}


/** Opcode 0xbe. */
FNIEMOP_DEF(iemOp_eSI_Iv)
{
    IEMOP_MNEMONIC("mov rSI,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSI | pIemCpu->uRexB);
}


/** Opcode 0xbf. */
FNIEMOP_DEF(iemOp_eDI_Iv)
{
    IEMOP_MNEMONIC("mov rDI,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDI | pIemCpu->uRexB);
}
12028
12029
/**
 * Opcode 0xc0 - Group 2: rotate/shift byte r/m by an immediate count.
 *
 * The ModR/M reg field selects rol/ror/rcl/rcr/shl/shr/sar; /6 is unassigned
 * and raises \#UD.  Requires an 80186 or later.
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_Ib)
{
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Eb,Ib"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Eb,Ib"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Eb,Ib"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Eb,Ib"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Eb,Ib"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Eb,Ib"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Eb,Ib"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    /* Tell the verifier that OF and AF are undefined after these operations. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEMOP_HLP_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,       pu8Dst,            0);
        IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1);
        IEM_MC_ARG(uint32_t *,      pEFlags,           2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,   pu8Dst,          0);
        IEM_MC_ARG(uint8_t,     cShiftArg,       1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        /* Note: one immediate byte follows the ModR/M bytes, hence the
           cbImm=1 argument to the effective address calculation. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEM_MC_ASSIGN(cShiftArg, cShift);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
12089
12090
/**
 * Opcode 0xc1 - Group 2: rotate/shift word/dword/qword r/m by an immediate
 * count.
 *
 * The ModR/M reg field selects rol/ror/rcl/rcr/shl/shr/sar; /6 is unassigned
 * and raises \#UD.  Requires an 80186 or later.  The operand width is taken
 * from the effective operand size.
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_Ib)
{
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Ev,Ib"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Ev,Ib"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Ev,Ib"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Ev,Ib"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Ev,Ib"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Ev,Ib"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Ev,Ib"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    /* Tell the verifier that OF and AF are undefined after these operations. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEMOP_HLP_NO_LOCK_PREFIX();
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit register writes zero the high dword of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,  pu16Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* One immediate byte follows the ModR/M bytes (cbImm=1). */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,  pu32Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* One immediate byte follows the ModR/M bytes (cbImm=1). */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,  pu64Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* One immediate byte follows the ModR/M bytes (cbImm=1). */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12228
12229
12230/** Opcode 0xc2. */
12231FNIEMOP_DEF(iemOp_retn_Iw)
12232{
12233 IEMOP_MNEMONIC("retn Iw");
12234 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
12235 IEMOP_HLP_NO_LOCK_PREFIX();
12236 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
12237 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pIemCpu->enmEffOpSize, u16Imm);
12238}
12239
12240
/**
 * Opcode 0xc3 - retn: near return without stack adjustment.
 *
 * Defers to iemCImpl_retn with zero bytes to pop; the operand size defaults
 * to 64-bit in long mode.
 */
FNIEMOP_DEF(iemOp_retn)
{
    IEMOP_MNEMONIC("retn");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pIemCpu->enmEffOpSize, 0);
}
12249
12250
/**
 * Opcode 0xc4 - LES Gv,Mp in legacy/compatibility mode with a memory operand;
 * otherwise (64-bit mode, or MOD=3) this byte is the 2-byte VEX prefix, which
 * is not implemented yet and currently raises \#UD.
 */
FNIEMOP_DEF(iemOp_les_Gv_Mp_vex2)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (   pIemCpu->enmCpuMode == IEMMODE_64BIT
        || (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC("2-byte-vex");
        /* The LES instruction is invalid 64-bit mode. In legacy and
           compatability mode it is invalid with MOD=3.
           The use as a VEX prefix is made possible by assigning the inverted
           REX.R to the top MOD bit, and the top bit in the inverted register
           specifier to the bottom MOD bit, thereby effectively limiting 32-bit
           to accessing registers 0..7 in this VEX form. */
        /** @todo VEX: Just use new tables for it. */
        return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_MNEMONIC("les Gv,Mp");
    /* Load ES and a general register from the far pointer in memory. */
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_ES, bRm);
}
12271
12272
/**
 * Opcode 0xc5 - LDS Gv,Mp in legacy/compatibility mode with a memory operand;
 * otherwise this byte is the 3-byte VEX prefix.  The VEX path consumes the
 * two VEX payload bytes and the opcode byte but is not implemented yet and
 * currently raises \#UD.
 */
FNIEMOP_DEF(iemOp_lds_Gv_Mp_vex3)
{
    /* The LDS instruction is invalid 64-bit mode. In legacy and
       compatability mode it is invalid with MOD=3.
       The use as a VEX prefix is made possible by assigning the inverted
       REX.R and REX.X to the two MOD bits, since the REX bits are ignored
       outside of 64-bit mode. VEX is not available in real or v86 mode. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pIemCpu->enmCpuMode != IEMMODE_64BIT)
    {
        if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        {
            IEMOP_MNEMONIC("lds Gv,Mp");
            /* Load DS and a general register from the far pointer in memory. */
            return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_DS, bRm);
        }
        /* MOD=3 outside 64-bit mode: VEX, which requires protected mode. */
        IEMOP_HLP_NO_REAL_OR_V86_MODE();
    }

    IEMOP_MNEMONIC("3-byte-vex");
    /** @todo Test when exctly the VEX conformance checks kick in during
     * instruction decoding and fetching (using \#PF). */
    uint8_t bVex1;   IEM_OPCODE_GET_NEXT_U8(&bVex1);
    uint8_t bVex2;   IEM_OPCODE_GET_NEXT_U8(&bVex2);
    uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
#if 0 /* will make sense of this next week... */
    if (   !(pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REX))
        &&
        )
    {

    }
#endif

    /** @todo VEX: Just use new tables for it. */
    return IEMOP_RAISE_INVALID_OPCODE();
}
12310
12311
/**
 * Opcode 0xc6 - Group 11: mov Eb,Ib (/0); all other reg-field encodings
 * raise \#UD.
 */
FNIEMOP_DEF(iemOp_Grp11_Eb_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
        return IEMOP_RAISE_INVALID_OPCODE();
    IEMOP_MNEMONIC("mov Eb,Ib");

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_BEGIN(0, 0);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u8Imm);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        /* One immediate byte follows the ModR/M bytes (cbImm=1); fetch it
           after the effective address has been calculated. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_STORE_MEM_U8(pIemCpu->iEffSeg, GCPtrEffDst, u8Imm);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
12343
12344
/**
 * Opcode 0xc7 - Group 11: mov Ev,Iz (/0); all other reg-field encodings
 * raise \#UD.  In 64-bit mode the imm32 is sign-extended to 64 bits.
 */
FNIEMOP_DEF(iemOp_Grp11_Ev_Iz)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Ev,Iz in this group. */
        return IEMOP_RAISE_INVALID_OPCODE();
    IEMOP_MNEMONIC("mov Ev,Iz");

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 0);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 0);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 0);
                /* imm32 sign-extended to 64 bits; there is no mov r/m64,imm64. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                /* Two immediate bytes follow the ModR/M bytes (cbImm=2). */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                /* Four immediate bytes follow the ModR/M bytes (cbImm=4). */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrEffDst, u32Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                /* Still only four immediate bytes (imm32, sign-extended). */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffDst, u64Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12425
12426
12427
12428
12429/** Opcode 0xc8. */
12430FNIEMOP_DEF(iemOp_enter_Iw_Ib)
12431{
12432 IEMOP_MNEMONIC("enter Iw,Ib");
12433 IEMOP_HLP_MIN_186();
12434 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
12435 IEMOP_HLP_NO_LOCK_PREFIX();
12436 uint16_t cbFrame; IEM_OPCODE_GET_NEXT_U16(&cbFrame);
12437 uint8_t u8NestingLevel; IEM_OPCODE_GET_NEXT_U8(&u8NestingLevel);
12438 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_enter, pIemCpu->enmEffOpSize, cbFrame, u8NestingLevel);
12439}
12440
12441
12442/** Opcode 0xc9. */
12443FNIEMOP_DEF(iemOp_leave)
12444{
12445 IEMOP_MNEMONIC("retn");
12446 IEMOP_HLP_MIN_186();
12447 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
12448 IEMOP_HLP_NO_LOCK_PREFIX();
12449 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_leave, pIemCpu->enmEffOpSize);
12450}
12451
12452
12453/** Opcode 0xca. */
12454FNIEMOP_DEF(iemOp_retf_Iw)
12455{
12456 IEMOP_MNEMONIC("retf Iw");
12457 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
12458 IEMOP_HLP_NO_LOCK_PREFIX();
12459 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
12460 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pIemCpu->enmEffOpSize, u16Imm);
12461}
12462
12463
/**
 * Opcode 0xcb - retf: far return without stack adjustment.
 *
 * Defers to iemCImpl_retf with zero bytes to pop.
 */
FNIEMOP_DEF(iemOp_retf)
{
    IEMOP_MNEMONIC("retf");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pIemCpu->enmEffOpSize, 0);
}
12472
12473
/**
 * Opcode 0xcc - int3: breakpoint, raises \#BP (vector 3) flagged as the
 * dedicated one-byte breakpoint instruction.
 */
FNIEMOP_DEF(iemOp_int_3)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_BP, true /*fIsBpInstr*/);
}
12480
12481
12482/** Opcode 0xcd. */
12483FNIEMOP_DEF(iemOp_int_Ib)
12484{
12485 uint8_t u8Int; IEM_OPCODE_GET_NEXT_U8(&u8Int);
12486 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12487 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, u8Int, false /*fIsBpInstr*/);
12488}
12489
12490
/**
 * Opcode 0xce - into: raise \#OF (vector 4).  Invalid in 64-bit mode.
 *
 * Unconditionally dispatches to iemCImpl_int; the overflow-flag check is
 * presumably performed by the CIMPL worker — TODO confirm against
 * iemCImpl_int.
 */
FNIEMOP_DEF(iemOp_into)
{
    IEMOP_MNEMONIC("into");
    IEMOP_HLP_NO_64BIT();

    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG_CONST(uint8_t,   u8Int,      /*=*/ X86_XCPT_OF, 0);
    IEM_MC_ARG_CONST(bool,      fIsBpInstr, /*=*/ false,       1);
    IEM_MC_CALL_CIMPL_2(iemCImpl_int, u8Int, fIsBpInstr);
    IEM_MC_END();
    return VINF_SUCCESS;
}
12504
12505
/**
 * Opcode 0xcf - iret: interrupt return, deferred to iemCImpl_iret with the
 * effective operand size.
 */
FNIEMOP_DEF(iemOp_iret)
{
    IEMOP_MNEMONIC("iret");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_iret, pIemCpu->enmEffOpSize);
}
12513
12514
/**
 * Opcode 0xd0 - Group 2: rotate/shift byte r/m by a constant count of 1.
 *
 * The ModR/M reg field selects rol/ror/rcl/rcr/shl/shr/sar; /6 is unassigned
 * and raises \#UD.
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Eb,1"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Eb,1"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Eb,1"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Eb,1"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Eb,1"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Eb,1"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Eb,1"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
    /* Tell the verifier that OF and AF are undefined after these operations. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
        IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=*/1,   1);
        IEM_MC_ARG(uint32_t *,      pEFlags,            2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
        IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=*/1,   1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,        2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        /* No immediate bytes follow (cbImm=0). */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
12570
12571
12572
/**
 * Opcode 0xd1 - Group 2: rotate/shift word/dword/qword r/m by a constant
 * count of 1.
 *
 * The ModR/M reg field selects rol/ror/rcl/rcr/shl/shr/sar; /6 is unassigned
 * and raises \#UD.  The operand width is taken from the effective operand
 * size.
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Ev,1"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Ev,1"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Ev,1"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Ev,1"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Ev,1"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Ev,1"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Ev,1"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
    /* Tell the verifier that OF and AF are undefined after these operations. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_NO_LOCK_PREFIX();
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit register writes zero the high dword of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,      pu16Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,        2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* No immediate bytes follow (cbImm=0). */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,      pu32Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,        2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* No immediate bytes follow (cbImm=0). */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,      pu64Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,        2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* No immediate bytes follow (cbImm=0). */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12702
12703
/**
 * Opcode 0xd2 - Group 2: rotate/shift byte r/m by the count in CL.
 *
 * The ModR/M reg field selects rol/ror/rcl/rcr/shl/shr/sar; /6 is unassigned
 * and raises \#UD.
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Eb,CL"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Eb,CL"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Eb,CL"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Eb,CL"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Eb,CL"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Eb,CL"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Eb,CL"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc, grr. */
    }
    /* Tell the verifier that OF and AF are undefined after these operations. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,   pu8Dst,     0);
        IEM_MC_ARG(uint8_t,     cShiftArg,  1);
        IEM_MC_ARG(uint32_t *,  pEFlags,    2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        /* The shift count comes from CL (low byte of xCX). */
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,   pu8Dst,          0);
        IEM_MC_ARG(uint8_t,     cShiftArg,       1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        /* The shift count comes from CL (low byte of xCX). */
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
12761
12762
/**
 * Opcode 0xd3 - Group 2: rotate/shift word/dword/qword r/m by the count in CL.
 *
 * The ModR/M reg field selects rol/ror/rcl/rcr/shl/shr/sar; /6 is unassigned
 * and raises \#UD.  The operand width is taken from the effective operand
 * size.
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Ev,CL"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Ev,CL"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Ev,CL"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Ev,CL"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Ev,CL"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Ev,CL"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Ev,CL"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    /* Tell the verifier that OF and AF are undefined after these operations. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_NO_LOCK_PREFIX();
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,  pu16Dst,    0);
                IEM_MC_ARG(uint8_t,     cShiftArg,  1);
                IEM_MC_ARG(uint32_t *,  pEFlags,    2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                /* The shift count comes from CL (low byte of xCX). */
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,  pu32Dst,    0);
                IEM_MC_ARG(uint8_t,     cShiftArg,  1);
                IEM_MC_ARG(uint32_t *,  pEFlags,    2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit register writes zero the high dword of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,  pu64Dst,    0);
                IEM_MC_ARG(uint8_t,     cShiftArg,  1);
                IEM_MC_ARG(uint32_t *,  pEFlags,    2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,  pu16Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                /* The shift count comes from CL (low byte of xCX). */
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,  pu32Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,  pu64Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12898
/** Opcode 0xd4. AAM - ASCII adjust AX after multiply (base given by Ib). */
FNIEMOP_DEF(iemOp_aam_Ib)
{
    IEMOP_MNEMONIC("aam Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);   /* The divisor base (normally 10). */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();                          /* AAM is invalid in 64-bit mode. */
    if (!bImm)                                     /* Division by zero raises #DE. */
        return IEMOP_RAISE_DIVIDE_ERROR();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aam, bImm);
}
12910
12911
/** Opcode 0xd5. AAD - ASCII adjust AX before division (base given by Ib). */
FNIEMOP_DEF(iemOp_aad_Ib)
{
    IEMOP_MNEMONIC("aad Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);   /* The multiplier base (normally 10); zero is allowed here. */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();                          /* AAD is invalid in 64-bit mode. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aad, bImm);
}
12921
12922
12923/** Opcode 0xd6. */
12924FNIEMOP_DEF(iemOp_salc)
12925{
12926 IEMOP_MNEMONIC("salc");
12927 IEMOP_HLP_MIN_286(); /* (undocument at the time) */
12928 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12929 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12930 IEMOP_HLP_NO_64BIT();
12931
12932 IEM_MC_BEGIN(0, 0);
12933 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
12934 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0xff);
12935 } IEM_MC_ELSE() {
12936 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0x00);
12937 } IEM_MC_ENDIF();
12938 IEM_MC_ADVANCE_RIP();
12939 IEM_MC_END();
12940 return VINF_SUCCESS;
12941}
12942
12943
/** Opcode 0xd7. XLAT/XLATB - AL = [DS:(E/R)BX + zero-extended AL],
 *  with one variant per effective address size. */
FNIEMOP_DEF(iemOp_xlat)
{
    IEMOP_MNEMONIC("xlat");
    IEMOP_HLP_NO_LOCK_PREFIX();
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint16_t, u16Addr);
            /* Table index is AL zero-extended, added to BX. */
            IEM_MC_FETCH_GREG_U8_ZX_U16(u16Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U16_TO_LOCAL(u16Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM16_U8(u8Tmp, pIemCpu->iEffSeg, u16Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint32_t, u32Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U32(u32Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U32_TO_LOCAL(u32Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM32_U8(u8Tmp, pIemCpu->iEffSeg, u32Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint64_t, u64Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U64(u64Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U64_TO_LOCAL(u64Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM_U8(u8Tmp, pIemCpu->iEffSeg, u64Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
12990
12991
/**
 * Common worker for FPU instructions working on ST0 and STn, and storing the
 * result in ST0.
 *
 * @param   bRm         The ModRM byte; the R/M field selects STn.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,        FpuRes,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,                 1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value2,                 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Both ST0 and STn must hold values; otherwise signal stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13022
13023
/**
 * Common worker for FPU instructions working on ST0 and STn, and only affecting
 * flags (FSW); no register is written.
 *
 * @param   bRm         The ModRM byte; the R/M field selects STn.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,         0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,                 1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value2,                 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW(u16Fsw);
    IEM_MC_ELSE()
        /* UINT8_MAX: no destination register to tag, only flag the underflow. */
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13054
13055
/**
 * Common worker for FPU instructions working on ST0 and STn, only affecting
 * flags (FSW), and popping the stack when done.
 *
 * @param   bRm         The ModRM byte; the R/M field selects STn.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,         0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,                 1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value2,                 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
    IEM_MC_ELSE()
        /* Underflow path also pops, matching the compare-and-pop semantics. */
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(UINT8_MAX);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13086
13087
/** Opcode 0xd8 11/0. FADD ST(0),ST(i): ST(0) += ST(i). */
FNIEMOP_DEF_1(iemOp_fadd_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fadd st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fadd_r80_by_r80);
}
13094
13095
/** Opcode 0xd8 11/1. FMUL ST(0),ST(i): ST(0) *= ST(i). */
FNIEMOP_DEF_1(iemOp_fmul_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fmul st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fmul_r80_by_r80);
}
13102
13103
/** Opcode 0xd8 11/2. FCOM ST(0),ST(i): compare, only FSW is updated. */
FNIEMOP_DEF_1(iemOp_fcom_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fcom_r80_by_r80);
}
13110
13111
/** Opcode 0xd8 11/3. FCOMP ST(0),ST(i): compare, update FSW, then pop.
 *  Shares the assembly worker with FCOM; the pop is done by the helper. */
FNIEMOP_DEF_1(iemOp_fcomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fcom_r80_by_r80);
}
13118
13119
/** Opcode 0xd8 11/4. FSUB ST(0),ST(i): ST(0) -= ST(i). */
FNIEMOP_DEF_1(iemOp_fsub_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsub st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsub_r80_by_r80);
}
13126
13127
/** Opcode 0xd8 11/5. FSUBR ST(0),ST(i): ST(0) = ST(i) - ST(0). */
FNIEMOP_DEF_1(iemOp_fsubr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsubr_r80_by_r80);
}
13134
13135
/** Opcode 0xd8 11/6. FDIV ST(0),ST(i): ST(0) /= ST(i). */
FNIEMOP_DEF_1(iemOp_fdiv_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdiv st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdiv_r80_by_r80);
}
13142
13143
/** Opcode 0xd8 11/7. FDIVR ST(0),ST(i): ST(0) = ST(i) / ST(0). */
FNIEMOP_DEF_1(iemOp_fdivr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdivr_r80_by_r80);
}
13150
13151
/**
 * Common worker for FPU instructions working on ST0 and an m32r, and storing
 * the result in ST0.
 *
 * @param   bRm         The ModRM byte; encodes the memory operand.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32r, uint8_t, bRm, PFNIEMAIMPLFPUR32, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U,            r32Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,        FpuRes,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,                 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U,  pr32Val2,       r32Val2,    2);

    /* Effective address must be decoded before declaring decoding done. */
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr32Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13187
13188
/** Opcode 0xd8 !11/0. FADD ST(0),m32real. */
FNIEMOP_DEF_1(iemOp_fadd_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fadd st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fadd_r80_by_r32);
}
13195
13196
/** Opcode 0xd8 !11/1. FMUL ST(0),m32real. */
FNIEMOP_DEF_1(iemOp_fmul_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fmul st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fmul_r80_by_r32);
}
13203
13204
/** Opcode 0xd8 !11/2. FCOM ST(0),m32real: compare against a 32-bit float
 *  in memory; only FSW is updated. */
FNIEMOP_DEF_1(iemOp_fcom_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcom st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U,            r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,        u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,                 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U,  pr32Val2,       r32Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        /* Memory-operand variants record DS:ptr as the FPU data pointer. */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13237
13238
/** Opcode 0xd8 !11/3. FCOMP ST(0),m32real: like FCOM m32r, but pops ST0
 *  after updating FSW. */
FNIEMOP_DEF_1(iemOp_fcomp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcomp st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U,            r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,        u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,                 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U,  pr32Val2,       r32Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        /* Underflow path also pops, matching compare-and-pop semantics. */
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13271
13272
/** Opcode 0xd8 !11/4. FSUB ST(0),m32real. */
FNIEMOP_DEF_1(iemOp_fsub_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsub st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsub_r80_by_r32);
}
13279
13280
/** Opcode 0xd8 !11/5. FSUBR ST(0),m32real: ST(0) = m32r - ST(0). */
FNIEMOP_DEF_1(iemOp_fsubr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsubr_r80_by_r32);
}
13287
13288
/** Opcode 0xd8 !11/6. FDIV ST(0),m32real. */
FNIEMOP_DEF_1(iemOp_fdiv_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdiv st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdiv_r80_by_r32);
}
13295
13296
/** Opcode 0xd8 !11/7. FDIVR ST(0),m32real: ST(0) = m32r / ST(0). */
FNIEMOP_DEF_1(iemOp_fdivr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdivr_r80_by_r32);
}
13303
13304
/** Opcode 0xd8. First FPU escape byte: dispatches on the ModRM byte —
 *  mod=11 selects the ST0/STn register forms, otherwise the m32real
 *  memory forms; the reg field picks the operation. */
FNIEMOP_DEF(iemOp_EscF0)
{
    /* Remember the offset of the escape byte itself for FOP reporting. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_stN,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_stN,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_stN, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m32r,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m32r,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m32r,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m32r,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m32r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m32r,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m32r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
13342
13343
/** Opcode 0xd9 /0 mem32real — FLD m32real: convert to 80-bit and push.
 * @sa iemOp_fld_m64r */
FNIEMOP_DEF_1(iemOp_fld_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fld m32r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U,            r32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U,  pr32Val,    r32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* A push requires ST(7) (the register below TOP) to be free. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r32_to_r80, pFpuRes, pr32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13376
13377
/** Opcode 0xd9 !11/2 mem32real — FST m32real: store ST0 as 32-bit float. */
FNIEMOP_DEF_1(iemOp_fst_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fst m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U,             pr32Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination so the conversion writes directly into guest memory. */
    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST0: with IM masked, store negative QNaN; then signal underflow. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13412
13413
/** Opcode 0xd9 !11/3 — FSTP m32real: store ST0 as 32-bit float, then pop. */
FNIEMOP_DEF_1(iemOp_fstp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fstp m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U,             pr32Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST0: with IM masked, store negative QNaN; underflow path pops too. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13448
13449
/** Opcode 0xd9 !11/4 — FLDENV m14/28byte: load FPU environment.
 *  Deferred to a C implementation; operand size picks 14 vs 28 byte format. */
FNIEMOP_DEF_1(iemOp_fldenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fldenv m14/28byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE,           enmEffOpSize, /*=*/ pIemCpu->enmEffOpSize,  0);
    IEM_MC_ARG(uint8_t,                 iEffSeg,                                    1);
    IEM_MC_ARG(RTGCPTR,                 GCPtrEffSrc,                                2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fldenv, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
13466
13467
13468/** Opcode 0xd9 !11/5 */
13469FNIEMOP_DEF_1(iemOp_fldcw, uint8_t, bRm)
13470{
13471 IEMOP_MNEMONIC("fldcw m2byte");
13472 IEM_MC_BEGIN(1, 1);
13473 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13474 IEM_MC_ARG(uint16_t, u16Fsw, 0);
13475 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13476 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13477 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13478 IEM_MC_FETCH_MEM_U16(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
13479 IEM_MC_CALL_CIMPL_1(iemCImpl_fldcw, u16Fsw);
13480 IEM_MC_END();
13481 return VINF_SUCCESS;
13482}
13483
13484
/** Opcode 0xd9 !11/6 — FNSTENV m14/28byte: store FPU environment (no-wait
 *  form; FSTENV is WAIT + FNSTENV).  Deferred to a C implementation. */
FNIEMOP_DEF_1(iemOp_fnstenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fstenv m14/m28byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE,           enmEffOpSize, /*=*/ pIemCpu->enmEffOpSize,  0);
    IEM_MC_ARG(uint8_t,                 iEffSeg,                                    1);
    IEM_MC_ARG(RTGCPTR,                 GCPtrEffDst,                                2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fnstenv, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}
13501
13502
/** Opcode 0xd9 !11/7 — FNSTCW m2byte: store the FPU control word. */
FNIEMOP_DEF_1(iemOp_fnstcw, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fnstcw m2byte");
    IEM_MC_BEGIN(2, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fcw);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_FETCH_FCW(u16Fcw);
    IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Fcw);
    IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
    return VINF_SUCCESS;
}
13519
13520
/** Opcode 0xd9 0xd0, 0xd9 0xd8-0xdf, ++?. FNOP: no operation, but still
 *  checks CR0.EM/TS and pending FPU exceptions, and updates FPUIP/FOP. */
FNIEMOP_DEF(iemOp_fnop)
{
    IEMOP_MNEMONIC("fnop");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /** @todo Testcase: looks like FNOP leaves FOP alone but updates FPUIP. Could be
     *        intel optimizations. Investigate. */
    IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
    return VINF_SUCCESS;
}
13538
13539
/** Opcode 0xd9 11/0 stN — FLD ST(i): push a copy of ST(i) onto the stack. */
FNIEMOP_DEF_1(iemOp_fld_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fld stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, bRm & X86_MODRM_RM_MASK)
        /* Wrap the source value as a result (FSW zero) and push it. */
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_PUSH_FPU_RESULT(FpuRes);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW();
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
13565
13566
/** Opcode 0xd9 11/3 stN — FXCH ST(i): exchange ST(0) and ST(i). */
FNIEMOP_DEF_1(iemOp_fxch_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fxch stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(1, 3);
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value1);
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value2);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_ARG_CONST(uint8_t,           iStReg, /*=*/ bRm & X86_MODRM_RM_MASK, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        /* Swap: ST(i)'s value (with C1 set) goes to ST0, ST0's value to ST(i). */
        IEM_MC_SET_FPU_RESULT(FpuRes, X86_FSW_C1, pr80Value2);
        IEM_MC_STORE_FPUREG_R80_SRC_REF(bRm & X86_MODRM_RM_MASK, pr80Value1);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        /* Underflow handling is involved enough to be done in C. */
        IEM_MC_CALL_CIMPL_1(iemCImpl_fxch_underflow, iStReg);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
13595
13596
/** Opcode 0xd9 11/4, 0xdd 11/2. FSTP ST(i): copy ST0 to ST(i), then pop. */
FNIEMOP_DEF_1(iemOp_fstp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fstp st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* fstp st0, st0 is frequently used as an official 'ffreep st0' sequence. */
    uint8_t const iDstReg = bRm & X86_MODRM_RM_MASK;
    if (!iDstReg)
    {
        /* Destination is ST0 itself: no copy needed, just pop if not empty. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL_CONST(uint16_t,        u16Fsw, /*=*/ 0);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();
        IEM_MC_IF_FPUREG_NOT_EMPTY(0)
            IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
        IEM_MC_ELSE()
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(0);
        IEM_MC_ENDIF();
        IEM_MC_USED_FPU();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value);
        IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();
        IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
            IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
            IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, iDstReg);
        IEM_MC_ELSE()
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(iDstReg);
        IEM_MC_ENDIF();
        IEM_MC_USED_FPU();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
13639
13640
/**
 * Common worker for FPU instructions working on ST0 and replaces it with the
 * result, i.e. unary operators.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpu_st0, PFNIEMAIMPLFPUR80UNARY, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(2, 1);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,        FpuRes,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,                  1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuRes, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13670
13671
/** Opcode 0xd9 0xe0. FCHS: negate the sign of ST0. */
FNIEMOP_DEF(iemOp_fchs)
{
    IEMOP_MNEMONIC("fchs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fchs_r80);
}
13678
13679
/** Opcode 0xd9 0xe1. FABS: clear the sign of ST0. */
FNIEMOP_DEF(iemOp_fabs)
{
    IEMOP_MNEMONIC("fabs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fabs_r80);
}
13686
13687
/**
 * Common worker for FPU instructions working on ST0 and only returns FSW
 * (no register is written).
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0, PFNIEMAIMPLFPUR80UNARYFSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(2, 1);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,        u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,                  1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pu16Fsw, pr80Value);
        IEM_MC_UPDATE_FSW(u16Fsw);
    IEM_MC_ELSE()
        /* UINT8_MAX: no destination register to tag, only flag the underflow. */
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13716
13717
/** Opcode 0xd9 0xe4. FTST: compare ST0 against +0.0, only FSW is updated. */
FNIEMOP_DEF(iemOp_ftst)
{
    IEMOP_MNEMONIC("ftst st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_ftst_r80);
}
13724
13725
/** Opcode 0xd9 0xe5. FXAM: classify the value in ST0 via C0-C3. */
FNIEMOP_DEF(iemOp_fxam)
{
    IEMOP_MNEMONIC("fxam st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_fxam_r80);
}
13732
13733
/**
 * Common worker for FPU instructions pushing a constant onto the FPU stack.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuPushConstant, PFNIEMAIMPLFPUR80LDCONST, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(1, 1);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,        FpuRes,     0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* A push requires ST(7) (the register below TOP) to be free. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_1(pfnAImpl, pFpuRes);
        IEM_MC_PUSH_FPU_RESULT(FpuRes);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW();
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13761
13762
/** Opcode 0xd9 0xe8. FLD1: push +1.0. */
FNIEMOP_DEF(iemOp_fld1)
{
    IEMOP_MNEMONIC("fld1");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fld1);
}
13769
13770
/** Opcode 0xd9 0xe9. FLDL2T: push log2(10). */
FNIEMOP_DEF(iemOp_fldl2t)
{
    IEMOP_MNEMONIC("fldl2t");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2t);
}
13777
13778
/** Opcode 0xd9 0xea. FLDL2E: push log2(e). */
FNIEMOP_DEF(iemOp_fldl2e)
{
    IEMOP_MNEMONIC("fldl2e");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2e);
}
13785
/** Opcode 0xd9 0xeb. FLDPI: push pi. */
FNIEMOP_DEF(iemOp_fldpi)
{
    IEMOP_MNEMONIC("fldpi");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldpi);
}
13792
13793
/** Opcode 0xd9 0xec. FLDLG2: push log10(2). */
FNIEMOP_DEF(iemOp_fldlg2)
{
    IEMOP_MNEMONIC("fldlg2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldlg2);
}
13800
/** Opcode 0xd9 0xed. FLDLN2: push ln(2). */
FNIEMOP_DEF(iemOp_fldln2)
{
    IEMOP_MNEMONIC("fldln2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldln2);
}
13807
13808
/** Opcode 0xd9 0xee. FLDZ: push +0.0. */
FNIEMOP_DEF(iemOp_fldz)
{
    IEMOP_MNEMONIC("fldz");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldz);
}
13815
13816
/** Opcode 0xd9 0xf0. F2XM1: ST0 = 2^ST0 - 1. */
FNIEMOP_DEF(iemOp_f2xm1)
{
    IEMOP_MNEMONIC("f2xm1 st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_f2xm1_r80);
}
13823
13824
13825/** Opcode 0xd9 0xf1. */
13826FNIEMOP_DEF(iemOp_fylx2)
13827{
13828 IEMOP_MNEMONIC("fylx2 st0");
13829 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fyl2x_r80);
13830}
13831
13832
/**
 * Common worker for FPU instructions working on ST0 and having two outputs, one
 * replacing ST0 and one pushed onto the stack.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuReplace_st0_push, PFNIEMAIMPLFPUR80UNARYTWO, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(2, 1);
    IEM_MC_LOCAL(IEMFPURESULTTWO,           FpuResTwo);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULTTWO,  pFpuResTwo,     FpuResTwo,  0);
    IEM_MC_ARG(PCRTFLOAT80U,                pr80Value,                  1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuResTwo, pr80Value);
        /* Stores both results: first replaces ST0, second is pushed. */
        IEM_MC_PUSH_FPU_RESULT_TWO(FpuResTwo);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW_TWO();
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13862
13863
/** Opcode 0xd9 0xf2. FPTAN: ST0 = tan(ST0), then push 1.0. */
FNIEMOP_DEF(iemOp_fptan)
{
    IEMOP_MNEMONIC("fptan st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fptan_r80_r80);
}
13870
13871
/**
 * Common worker for FPU instructions working on STn and ST0, storing the result
 * in STn, and popping the stack unless IE, DE or ZE was raised.
 *
 * @param   bRm         The ModRM byte (or a literal index); the R/M field
 *                      selects STn, the destination.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,        FpuRes,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,                 1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value2,                 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Note: STn is the first operand here, ST0 the second. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, bRm & X86_MODRM_RM_MASK);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(bRm & X86_MODRM_RM_MASK);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13903
13904
/** Opcode 0xd9 0xf3. */
FNIEMOP_DEF(iemOp_fpatan)
{
    IEMOP_MNEMONIC("fpatan st1,st0");
    /* Result goes to ST1 (iStReg=1) and the stack is popped by the worker. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fpatan_r80_by_r80);
}
13911
13912
/** Opcode 0xd9 0xf4. */
FNIEMOP_DEF(iemOp_fxtract)
{
    IEMOP_MNEMONIC("fxtract st0");
    /* Two-output worker: one result replaces ST0, a second is pushed. */
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fxtract_r80_r80);
}
13919
13920
/** Opcode 0xd9 0xf5. */
FNIEMOP_DEF(iemOp_fprem1)
{
    IEMOP_MNEMONIC("fprem1 st0, st1");
    /* st0 is the destination, ST1 (iStReg=1) the second operand; no pop. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem1_r80_by_r80);
}
13927
13928
/** Opcode 0xd9 0xf6. */
FNIEMOP_DEF(iemOp_fdecstp)
{
    IEMOP_MNEMONIC("fdecstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     * FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Rotate the stack by decrementing TOP; no register content is examined
       or modified, and the FSW flags are loaded from the constant 0. */
    IEM_MC_FPU_STACK_DEC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0);

    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
13951
13952
/** Opcode 0xd9 0xf7. */
FNIEMOP_DEF(iemOp_fincstp)
{
    IEMOP_MNEMONIC("fincstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     * FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Rotate the stack by incrementing TOP; mirror image of FDECSTP above. */
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0);

    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
13975
13976
/** Opcode 0xd9 0xf8. */
FNIEMOP_DEF(iemOp_fprem)
{
    IEMOP_MNEMONIC("fprem st0, st1");
    /* st0 is the destination, ST1 (iStReg=1) the second operand; no pop. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem_r80_by_r80);
}
13983
13984
/** Opcode 0xd9 0xf9. */
FNIEMOP_DEF(iemOp_fyl2xp1)
{
    IEMOP_MNEMONIC("fyl2xp1 st1,st0");
    /* Result goes to ST1 (iStReg=1) and the stack is popped by the worker. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2xp1_r80_by_r80);
}
13991
13992
/** Opcode 0xd9 0xfa. */
FNIEMOP_DEF(iemOp_fsqrt)
{
    IEMOP_MNEMONIC("fsqrt st0");
    /* Unary worker: operates on ST0 in place. */
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsqrt_r80);
}
13999
14000
/** Opcode 0xd9 0xfb. */
FNIEMOP_DEF(iemOp_fsincos)
{
    IEMOP_MNEMONIC("fsincos st0");
    /* Two-output worker: one result replaces ST0, a second is pushed. */
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fsincos_r80_r80);
}
14007
14008
/** Opcode 0xd9 0xfc. */
FNIEMOP_DEF(iemOp_frndint)
{
    IEMOP_MNEMONIC("frndint st0");
    /* Unary worker: operates on ST0 in place. */
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_frndint_r80);
}
14015
14016
/** Opcode 0xd9 0xfd. */
FNIEMOP_DEF(iemOp_fscale)
{
    IEMOP_MNEMONIC("fscale st0, st1");
    /* st0 is the destination, ST1 (iStReg=1) the second operand; no pop. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fscale_r80_by_r80);
}
14023
14024
/** Opcode 0xd9 0xfe. */
FNIEMOP_DEF(iemOp_fsin)
{
    IEMOP_MNEMONIC("fsin st0");
    /* Unary worker: operates on ST0 in place. */
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsin_r80);
}
14031
14032
/** Opcode 0xd9 0xff. */
FNIEMOP_DEF(iemOp_fcos)
{
    IEMOP_MNEMONIC("fcos st0");
    /* Unary worker: operates on ST0 in place. */
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fcos_r80);
}
14039
14040
/** Used by iemOp_EscF1.
 * Dispatch table for the register (mod=3) forms 0xe0..0xff of escape opcode
 * 0xd9, indexed by (opcode byte - 0xe0). */
static const PFNIEMOP g_apfnEscF1_E0toFF[32] =
{
    /* 0xe0 */ iemOp_fchs,
    /* 0xe1 */ iemOp_fabs,
    /* 0xe2 */ iemOp_Invalid,
    /* 0xe3 */ iemOp_Invalid,
    /* 0xe4 */ iemOp_ftst,
    /* 0xe5 */ iemOp_fxam,
    /* 0xe6 */ iemOp_Invalid,
    /* 0xe7 */ iemOp_Invalid,
    /* 0xe8 */ iemOp_fld1,
    /* 0xe9 */ iemOp_fldl2t,
    /* 0xea */ iemOp_fldl2e,
    /* 0xeb */ iemOp_fldpi,
    /* 0xec */ iemOp_fldlg2,
    /* 0xed */ iemOp_fldln2,
    /* 0xee */ iemOp_fldz,
    /* 0xef */ iemOp_Invalid,
    /* 0xf0 */ iemOp_f2xm1,
    /* 0xf1 */ iemOp_fylx2, /* NOTE(review): handler name looks like a typo for fyl2x (FYL2X) — rename together with its definition. */
    /* 0xf2 */ iemOp_fptan,
    /* 0xf3 */ iemOp_fpatan,
    /* 0xf4 */ iemOp_fxtract,
    /* 0xf5 */ iemOp_fprem1,
    /* 0xf6 */ iemOp_fdecstp,
    /* 0xf7 */ iemOp_fincstp,
    /* 0xf8 */ iemOp_fprem,
    /* 0xf9 */ iemOp_fyl2xp1,
    /* 0xfa */ iemOp_fsqrt,
    /* 0xfb */ iemOp_fsincos,
    /* 0xfc */ iemOp_frndint,
    /* 0xfd */ iemOp_fscale,
    /* 0xfe */ iemOp_fsin,
    /* 0xff */ iemOp_fcos
};
14077
14078
/** Opcode 0xd9.
 * Decoder for escape opcode 0xd9: dispatches on the modrm reg field, with
 * separate register (mod=3) and memory forms. */
FNIEMOP_DEF(iemOp_EscF1)
{
    /* Record the FPU opcode offset (opcode byte precedes the modrm byte). */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register forms. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm);
            case 2:
                if (bRm == 0xd0)
                    return FNIEMOP_CALL(iemOp_fnop);
                return IEMOP_RAISE_INVALID_OPCODE();
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved. Intel behavior seems to be FSTP ST(i) though. */
            case 4:
            case 5:
            case 6:
            case 7:
                /* 0xe0..0xff handled via the dispatch table above. */
                Assert((unsigned)bRm - 0xe0U < RT_ELEMENTS(g_apfnEscF1_E0toFF));
                return FNIEMOP_CALL(g_apfnEscF1_E0toFF[bRm - 0xe0]);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory forms. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m32r, bRm);
            case 1: return IEMOP_RAISE_INVALID_OPCODE();
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m32r, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fldenv, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fldcw, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fnstenv, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstcw, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
14120
14121
/** Opcode 0xda 11/0.
 * FCMOVB: copy ST(i) to ST0 when EFLAGS.CF is set.  Both ST(i) and ST0 must
 * be non-empty, else stack underflow is signalled for ST0. */
FNIEMOP_DEF_1(iemOp_fcmovb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14148
14149
/** Opcode 0xda 11/1.
 * FCMOVE: copy ST(i) to ST0 when EFLAGS.ZF is set. */
FNIEMOP_DEF_1(iemOp_fcmove_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmove st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14176
14177
/** Opcode 0xda 11/2.
 * FCMOVBE: copy ST(i) to ST0 when CF or ZF is set. */
FNIEMOP_DEF_1(iemOp_fcmovbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14204
14205
/** Opcode 0xda 11/3.
 * FCMOVU: copy ST(i) to ST0 when EFLAGS.PF is set (unordered). */
FNIEMOP_DEF_1(iemOp_fcmovu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovu st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14232
14233
/**
 * Common worker for FPU instructions working on ST0 and STn, only affecting
 * flags, and popping twice when done.
 *
 * Compares ST0 against ST1 (both fixed); only the FSW is updated, no value
 * is stored back.  Underflow (either register empty) also pops twice.
 *
 * @param pfnAImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, 1)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW_THEN_POP_POP(u16Fsw);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP_POP();
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14264
14265
/** Opcode 0xda 0xe9. */
FNIEMOP_DEF(iemOp_fucompp)
{
    IEMOP_MNEMONIC("fucompp st0,stN");
    /* Flags-only compare of ST0 vs ST1, then pop twice. */
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fucom_r80_by_r80);
}
14272
14273
/**
 * Common worker for FPU instructions working on ST0 and an m32i, and storing
 * the result in ST0.
 *
 * Calculates the effective address before completing decoding, fetches the
 * 32-bit signed integer operand, and signals stack underflow on empty ST0.
 *
 * @param pfnAImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32i, uint8_t, bRm, PFNIEMAIMPLFPUI32, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi32Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14309
14310
/** Opcode 0xda !11/0. */
FNIEMOP_DEF_1(iemOp_fiadd_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fiadd m32i");
    /* ST0 op m32i -> ST0 via the common worker. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fiadd_r80_by_i32);
}
14317
14318
/** Opcode 0xda !11/1. */
FNIEMOP_DEF_1(iemOp_fimul_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fimul m32i");
    /* ST0 op m32i -> ST0 via the common worker. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fimul_r80_by_i32);
}
14325
14326
/** Opcode 0xda !11/2.
 * FICOM: compare ST0 with an m32i; flags-only (FSW update), no pop. */
FNIEMOP_DEF_1(iemOp_ficom_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ficom st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14359
14360
/** Opcode 0xda !11/3.
 * FICOMP: same as FICOM above, but pops ST0 afterwards (THEN_POP variants). */
FNIEMOP_DEF_1(iemOp_ficomp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ficomp st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14393
14394
/** Opcode 0xda !11/4. */
FNIEMOP_DEF_1(iemOp_fisub_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisub m32i");
    /* ST0 op m32i -> ST0 via the common worker. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisub_r80_by_i32);
}
14401
14402
/** Opcode 0xda !11/5. */
FNIEMOP_DEF_1(iemOp_fisubr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisubr m32i");
    /* Reversed-operand variant; still ST0 as the destination. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisubr_r80_by_i32);
}
14409
14410
/** Opcode 0xda !11/6. */
FNIEMOP_DEF_1(iemOp_fidiv_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fidiv m32i");
    /* ST0 op m32i -> ST0 via the common worker. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidiv_r80_by_i32);
}
14417
14418
/** Opcode 0xda !11/7. */
FNIEMOP_DEF_1(iemOp_fidivr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fidivr m32i");
    /* Reversed-operand variant; still ST0 as the destination. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidivr_r80_by_i32);
}
14425
14426
/** Opcode 0xda.
 * Decoder for escape opcode 0xda: FCMOVcc register forms and m32i integer
 * arithmetic memory forms, dispatched on the modrm reg field. */
FNIEMOP_DEF(iemOp_EscF2)
{
    /* Record the FPU opcode offset (opcode byte precedes the modrm byte). */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register forms. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovb_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmove_stN, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovu_stN, bRm);
            case 4: return IEMOP_RAISE_INVALID_OPCODE();
            case 5:
                /* Only the 0xe9 encoding (FUCOMPP) is valid in this group. */
                if (bRm == 0xe9)
                    return FNIEMOP_CALL(iemOp_fucompp);
                return IEMOP_RAISE_INVALID_OPCODE();
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory forms (m32i operand). */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m32i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m32i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m32i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m32i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m32i, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m32i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m32i, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m32i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
14466
14467
/** Opcode 0xdb !11/0.
 * FILD m32i: convert a 32-bit signed integer and push it onto the stack.
 * The would-be destination ST7 (next TOP) must be empty, else push overflow. */
FNIEMOP_DEF_1(iemOp_fild_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fild m32i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int32_t, i32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val, i32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i32_to_r80, pFpuRes, pi32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14499
14500
/** Opcode 0xdb !11/1.
 * FISTTP m32i: truncating store of ST0 to a 32-bit integer, then pop.
 * On empty ST0, if FCW.IM is set the integer-indefinite value is stored. */
FNIEMOP_DEF_1(iemOp_fisttp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisttp m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14535
14536
/** Opcode 0xdb !11/2.
 * FIST m32i: store ST0 as a 32-bit integer; no pop (non-_THEN_POP FSW update). */
FNIEMOP_DEF_1(iemOp_fist_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fist m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14571
14572
14573/** Opcode 0xdb !11/3. */
14574FNIEMOP_DEF_1(iemOp_fistp_m32i, uint8_t, bRm)
14575{
14576 IEMOP_MNEMONIC("fisttp m32i");
14577 IEM_MC_BEGIN(3, 2);
14578 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14579 IEM_MC_LOCAL(uint16_t, u16Fsw);
14580 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14581 IEM_MC_ARG(int32_t *, pi32Dst, 1);
14582 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
14583
14584 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
14585 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14586 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14587 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14588
14589 IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
14590 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
14591 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
14592 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
14593 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
14594 IEM_MC_ELSE()
14595 IEM_MC_IF_FCW_IM()
14596 IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
14597 IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
14598 IEM_MC_ENDIF();
14599 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
14600 IEM_MC_ENDIF();
14601 IEM_MC_USED_FPU();
14602 IEM_MC_ADVANCE_RIP();
14603
14604 IEM_MC_END();
14605 return VINF_SUCCESS;
14606}
14607
14608
/** Opcode 0xdb !11/5.
 * FLD m80r: load an 80-bit real from memory and push it onto the stack. */
FNIEMOP_DEF_1(iemOp_fld_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fld m80r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT80U, r80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT80U, pr80Val, r80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R80(r80Val, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* ST7 (the next TOP slot) must be free, else push overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r80, pFpuRes, pr80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14640
14641
/** Opcode 0xdb !11/7.
 * FSTP m80r: store ST0 to memory as an 80-bit real, then pop.
 * On empty ST0, if FCW.IM is set a negative QNaN is stored instead. */
FNIEMOP_DEF_1(iemOp_fstp_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fstp m80r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT80U, pr80Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr80Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r80, pu16Fsw, pr80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr80Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(pr80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr80Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14676
14677
/** Opcode 0xdb 11/0.
 * FCMOVNB: copy ST(i) to ST0 when EFLAGS.CF is clear. */
FNIEMOP_DEF_1(iemOp_fcmovnb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovnb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14704
14705
/** Opcode 0xdb 11/1.
 * FCMOVNE: copy ST(i) to ST0 when EFLAGS.ZF is clear. */
FNIEMOP_DEF_1(iemOp_fcmovne_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovne st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14732
14733
/** Opcode 0xdb 11/2.
 * FCMOVNBE: copy ST(i) to ST0 when both CF and ZF are clear. */
FNIEMOP_DEF_1(iemOp_fcmovnbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovnbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14760
14761
14762/** Opcode 0xdb 11/3. */
14763FNIEMOP_DEF_1(iemOp_fcmovnnu_stN, uint8_t, bRm)
14764{
14765 IEMOP_MNEMONIC("fcmovnnu st0,stN");
14766 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14767
14768 IEM_MC_BEGIN(0, 1);
14769 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
14770
14771 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14772 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14773
14774 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
14775 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF)
14776 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
14777 IEM_MC_ENDIF();
14778 IEM_MC_UPDATE_FPU_OPCODE_IP();
14779 IEM_MC_ELSE()
14780 IEM_MC_FPU_STACK_UNDERFLOW(0);
14781 IEM_MC_ENDIF();
14782 IEM_MC_USED_FPU();
14783 IEM_MC_ADVANCE_RIP();
14784
14785 IEM_MC_END();
14786 return VINF_SUCCESS;
14787}
14788
14789
/** Opcode 0xdb 0xe0.
 * FNENI: 8087-only interrupt enable; executed as a no-op here (only the
 * device-not-available check and RIP advance are performed). */
FNIEMOP_DEF(iemOp_fneni)
{
    IEMOP_MNEMONIC("fneni (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
14801
14802
/** Opcode 0xdb 0xe1.
 * FNDISI: 8087-only interrupt disable; executed as a no-op here. */
FNIEMOP_DEF(iemOp_fndisi)
{
    IEMOP_MNEMONIC("fndisi (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
14814
14815
/** Opcode 0xdb 0xe2.
 * FNCLEX: clear the FPU exception bits in FSW (no exception check first,
 * hence the N form). */
FNIEMOP_DEF(iemOp_fnclex)
{
    IEMOP_MNEMONIC("fnclex");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_CLEAR_FSW_EX();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
14829
14830
/** Opcode 0xdb 0xe3.
 * FNINIT: deferred to the C implementation; fCheckXcpts=false selects the
 * no-wait (FNINIT as opposed to FINIT) behavior. */
FNIEMOP_DEF(iemOp_fninit)
{
    IEMOP_MNEMONIC("fninit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_finit, false /*fCheckXcpts*/);
}
14838
14839
/** Opcode 0xdb 0xe4.
 * FNSETPM: 80287-only; executed as a no-op here. */
FNIEMOP_DEF(iemOp_fnsetpm)
{
    IEMOP_MNEMONIC("fnsetpm (80287/ign)"); /* set protected mode on fpu. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
14851
14852
/** Opcode 0xdb 0xe5.
 * FRSTPM: 80287XL-only; currently raises #UD (the no-op variant for older
 * CPUs is compiled out below). */
FNIEMOP_DEF(iemOp_frstpm)
{
    IEMOP_MNEMONIC("frstpm (80287XL/ign)"); /* reset pm, back to real mode. */
#if 0 /* #UDs on newer CPUs */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
#else
    return IEMOP_RAISE_INVALID_OPCODE();
#endif
}
14868
14869
/** Opcode 0xdb 11/5.
 * FUCOMI: compare ST0 with ST(i) setting EFLAGS; no pop (fPop=false). */
FNIEMOP_DEF_1(iemOp_fucomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fucomi st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fucomi_r80_by_r80, false /*fPop*/);
}
14876
14877
/** Opcode 0xdb 11/6.
 * FCOMI: compare ST0 with ST(i) setting EFLAGS; no pop (fPop=false). */
FNIEMOP_DEF_1(iemOp_fcomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcomi st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, false /*fPop*/);
}
14884
14885
/** Opcode 0xdb.
 * Decoder for escape opcode 0xdb: FCMOVNcc / control instructions in the
 * register forms, m32i and m80r loads/stores in the memory forms. */
FNIEMOP_DEF(iemOp_EscF3)
{
    /* Record the FPU opcode offset (opcode byte precedes the modrm byte). */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register forms. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovnb_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmovne_stN, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovnbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovnnu_stN, bRm);
            case 4:
                /* Control/no-op group: dispatch on the full opcode byte. */
                switch (bRm)
                {
                    case 0xe0: return FNIEMOP_CALL(iemOp_fneni);
                    case 0xe1: return FNIEMOP_CALL(iemOp_fndisi);
                    case 0xe2: return FNIEMOP_CALL(iemOp_fnclex);
                    case 0xe3: return FNIEMOP_CALL(iemOp_fninit);
                    case 0xe4: return FNIEMOP_CALL(iemOp_fnsetpm);
                    case 0xe5: return FNIEMOP_CALL(iemOp_frstpm);
                    case 0xe6: return IEMOP_RAISE_INVALID_OPCODE();
                    case 0xe7: return IEMOP_RAISE_INVALID_OPCODE();
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case 5: return FNIEMOP_CALL_1(iemOp_fucomi_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fcomi_stN, bRm);
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory forms. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fild_m32i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m32i,bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fist_m32i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fistp_m32i, bRm);
            case 4: return IEMOP_RAISE_INVALID_OPCODE();
            case 5: return FNIEMOP_CALL_1(iemOp_fld_m80r, bRm);
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return FNIEMOP_CALL_1(iemOp_fstp_m80r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
14935
14936
/**
 * Common worker for FPU instructions working on STn and ST0, and storing the
 * result in STn unless IE, DE or ZE was raised.
 *
 * On stack underflow (either register empty) the underflow handling writes to
 * ST(i) instead of calling the assembly worker.
 *
 * @param   bRm         The ModR/M byte; the rm field selects ST(i).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,         1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value2,         2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Operand 1 is ST(i), operand 2 is ST(0); result goes to ST(i). */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14968
14969
/** Opcode 0xdc 11/0.
 * FADD ST(i),ST(0) - result stored in ST(i). */
FNIEMOP_DEF_1(iemOp_fadd_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fadd stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fadd_r80_by_r80);
}
14976
14977
/** Opcode 0xdc 11/1.
 * FMUL ST(i),ST(0) - result stored in ST(i). */
FNIEMOP_DEF_1(iemOp_fmul_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fmul stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fmul_r80_by_r80);
}
14984
14985
/** Opcode 0xdc 11/4.
 * FSUBR ST(i),ST(0) - reversed subtract, result stored in ST(i). */
FNIEMOP_DEF_1(iemOp_fsubr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsubr_r80_by_r80);
}
14992
14993
/** Opcode 0xdc 11/5.
 * FSUB ST(i),ST(0) - result stored in ST(i). */
FNIEMOP_DEF_1(iemOp_fsub_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsub stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsub_r80_by_r80);
}
15000
15001
/** Opcode 0xdc 11/6.
 * FDIVR ST(i),ST(0) - reversed divide, result stored in ST(i). */
FNIEMOP_DEF_1(iemOp_fdivr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdivr_r80_by_r80);
}
15008
15009
/** Opcode 0xdc 11/7.
 * FDIV ST(i),ST(0) - result stored in ST(i). */
FNIEMOP_DEF_1(iemOp_fdiv_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdiv stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdiv_r80_by_r80);
}
15016
15017
/**
 * Common worker for FPU instructions working on ST0 and a 64-bit floating point
 * memory operand, and storing the result in ST0.
 *
 * @param   bRm         The ModR/M byte; the mod/rm fields encode the memory operand.
 * @param   pfnImpl     Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_ST0_m64r, uint8_t, bRm, PFNIEMAIMPLFPUR64, pfnImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U,            r64Factor2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,        FpuRes,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Factor1,                1);
    IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U,   pr64Factor2,    r64Factor2, 2);

    /* Effective address must be calculated before decoding is declared done. */
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Factor2, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Factor1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnImpl, pFpuRes, pr80Factor1, pr64Factor2);
        IEM_MC_STORE_FPU_RESULT_MEM_OP(FpuRes, 0, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(0, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15052
15053
/** Opcode 0xdc !11/0.
 * FADD ST(0),m64real. */
FNIEMOP_DEF_1(iemOp_fadd_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fadd m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fadd_r80_by_r64);
}
15060
15061
/** Opcode 0xdc !11/1.
 * FMUL ST(0),m64real. */
FNIEMOP_DEF_1(iemOp_fmul_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fmul m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fmul_r80_by_r64);
}
15068
15069
/** Opcode 0xdc !11/2.
 * FCOM ST(0),m64real - compares ST(0) with a 64-bit real memory operand,
 * updating only FSW (no result stored, no pop). */
FNIEMOP_DEF_1(iemOp_fcom_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcom st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U,            r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,  0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,          1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U,  pr64Val2,   r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        /* UINT8_MAX: underflow does not target a stack register here. */
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15102
15103
/** Opcode 0xdc !11/3.
 * FCOMP ST(0),m64real - like FCOM m64r (same assembly worker) but pops
 * the register stack after updating FSW. */
FNIEMOP_DEF_1(iemOp_fcomp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcomp st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U,            r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,  0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,          1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U,  pr64Val2,   r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        /* UINT8_MAX: underflow does not target a stack register here. */
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15136
15137
/** Opcode 0xdc !11/4.
 * FSUB ST(0),m64real. */
FNIEMOP_DEF_1(iemOp_fsub_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsub m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsub_r80_by_r64);
}
15144
15145
/** Opcode 0xdc !11/5.
 * FSUBR ST(0),m64real - reversed subtract. */
FNIEMOP_DEF_1(iemOp_fsubr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsubr_r80_by_r64);
}
15152
15153
/** Opcode 0xdc !11/6.
 * FDIV ST(0),m64real. */
FNIEMOP_DEF_1(iemOp_fdiv_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdiv m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdiv_r80_by_r64);
}
15160
15161
/** Opcode 0xdc !11/7.
 * FDIVR ST(0),m64real - reversed divide. */
FNIEMOP_DEF_1(iemOp_fdivr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdivr_r80_by_r64);
}
15168
15169
/** Opcode 0xdc.
 * Escape opcode 0xdc decoder: register forms are the ST(i),ST(0) arithmetic
 * instructions; memory forms are the m64real arithmetic/compare instructions. */
FNIEMOP_DEF(iemOp_EscF4)
{
    /* Record the FPU opcode offset (points at the escape byte just fetched). */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN_st0,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN_st0,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN,      bRm); /* Marked reserved, intel behavior is that of FCOM ST(i). */
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN,     bRm); /* Marked reserved, intel behavior is that of FCOMP ST(i). */
            case 4: return FNIEMOP_CALL_1(iemOp_fsubr_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsub_stN_st0,  bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivr_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdiv_stN_st0,  bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m64r,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m64r,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m64r,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m64r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m64r,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m64r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m64r,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m64r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
15206
15207
/** Opcode 0xdd !11/0.
 * FLD m64real - converts a 64-bit real from memory to 80-bit and pushes it
 * onto the register stack; pushing requires ST(7) to be empty.
 * @sa iemOp_fld_m32r */
FNIEMOP_DEF_1(iemOp_fld_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fld m64r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U,            r64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U,  pr64Val,    r64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Val, pIemCpu->iEffSeg, GCPtrEffSrc);
    /* Register 7 (relative to TOP) must be free for the push. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r64_to_r80, pFpuRes, pr64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15239
15240
/** Opcode 0xdd !11/1.
 * FISTTP m64int - stores ST(0) to memory as a 64-bit integer using
 * truncation, then pops.  If ST(0) is empty and IM is masked, the integer
 * indefinite value (INT64_MIN) is stored instead. */
FNIEMOP_DEF_1(iemOp_fisttp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisttp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int64_t *,               pi64Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing before knowing whether ST(0) is empty,
       so memory faults are raised either way. */
    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15275
15276
/** Opcode 0xdd !11/2.
 * FST m64real - stores ST(0) to memory as a 64-bit real (no pop).  If ST(0)
 * is empty and IM is masked, a negative QNaN is stored instead. */
FNIEMOP_DEF_1(iemOp_fst_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fst m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U,             pr64Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing before knowing whether ST(0) is empty,
       so memory faults are raised either way. */
    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15311
15312
15313
15314
/** Opcode 0xdd !11/3.
 * FSTP m64real - like FST m64real (same assembly worker) but pops the
 * register stack after the store. */
FNIEMOP_DEF_1(iemOp_fstp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fstp m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U,             pr64Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing before knowing whether ST(0) is empty,
       so memory faults are raised either way. */
    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15349
15350
/** Opcode 0xdd !11/4.
 * FRSTOR m94/108byte - restores the complete FPU state from memory; the
 * image size depends on the effective operand size, hence enmEffOpSize is
 * passed to the C implementation. */
FNIEMOP_DEF_1(iemOp_frstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC("frstor m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE,           enmEffOpSize, /*=*/ pIemCpu->enmEffOpSize,  0);
    IEM_MC_ARG(uint8_t,                 iEffSeg,                                    1);
    IEM_MC_ARG(RTGCPTR,                 GCPtrEffSrc,                                2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_frstor, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
15367
15368
/** Opcode 0xdd !11/6.
 * FNSAVE m94/108byte - saves the complete FPU state to memory (no-wait
 * form); the image size depends on the effective operand size. */
FNIEMOP_DEF_1(iemOp_fnsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fnsave m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE,           enmEffOpSize, /*=*/ pIemCpu->enmEffOpSize,  0);
    IEM_MC_ARG(uint8_t,                 iEffSeg,                                    1);
    IEM_MC_ARG(RTGCPTR,                 GCPtrEffDst,                                2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fnsave, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;

}
15386
/** Opcode 0xdd !11/7.
 * FNSTSW m16 - stores the FPU status word to a 16-bit memory operand
 * (no-wait form; no FPU exception check, only \#NM). */
FNIEMOP_DEF_1(iemOp_fnstsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fnstsw m16");

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();

    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Tmp);
    IEM_MC_ADVANCE_RIP();

/** @todo Debug / drop a hint to the verifier that things may differ
 * from REM. Seen 0x4020 (iem) vs 0x4000 (rem) at 0008:801c6b88 booting
 * NT4SP1. (X86_FSW_PE) */
    IEM_MC_END();
    return VINF_SUCCESS;
}
15410
15411
/** Opcode 0xdd 11/0.
 * FFREE ST(i) - marks the given register's tag as empty; updates FOP/FPUIP
 * but advances neither TOP nor any data register. */
FNIEMOP_DEF_1(iemOp_ffree_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ffree stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C1, C2 and C3 are documented as undefined, we leave the
             unmodified. */

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
15433
15434
/** Opcode 0xdd 11/2.
 * FST ST(i) - copies ST(0) into ST(i) (FSW cleared in the result), raising
 * stack underflow against ST(i) if ST(0) is empty. */
FNIEMOP_DEF_1(iemOp_fst_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fst st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
15457
15458
15459/** Opcode 0xdd 11/3. */
15460FNIEMOP_DEF_1(iemOp_fucom_stN_st0, uint8_t, bRm)
15461{
15462 IEMOP_MNEMONIC("fcom st0,stN");
15463 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fucom_r80_by_r80);
15464}
15465
15466
15467/** Opcode 0xdd 11/4. */
15468FNIEMOP_DEF_1(iemOp_fucomp_stN, uint8_t, bRm)
15469{
15470 IEMOP_MNEMONIC("fcomp st0,stN");
15471 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fucom_r80_by_r80);
15472}
15473
15474
/** Opcode 0xdd.
 * Escape opcode 0xdd decoder: register forms are FFREE/FXCH/FST/FSTP/FUCOM/
 * FUCOMP; memory forms are the m64 real/int load/store plus FRSTOR, FNSAVE
 * and FNSTSW. */
FNIEMOP_DEF(iemOp_EscF5)
{
    /* Record the FPU opcode offset (points at the escape byte just fetched). */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffree_stN,   bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN,    bRm); /* Reserved, intel behavior is that of XCHG ST(i). */
            case 2: return FNIEMOP_CALL_1(iemOp_fst_stN,     bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN,    bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fucom_stN_st0,bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fucomp_stN,  bRm);
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m64r,    bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m64i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m64r,    bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m64r,   bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_frstor,      bRm);
            case 5: return IEMOP_RAISE_INVALID_OPCODE();
            case 6: return FNIEMOP_CALL_1(iemOp_fnsave,      bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstsw,      bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
15511
15512
/** Opcode 0xde 11/0.
 * FADDP ST(i),ST(0) - add and pop; result stored in ST(i) before the pop. */
FNIEMOP_DEF_1(iemOp_faddp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("faddp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fadd_r80_by_r80);
}
15519
15520
/** Opcode 0xde 11/1.
 * FMULP ST(i),ST(0) - multiply and pop; result stored in ST(i) before the pop. */
FNIEMOP_DEF_1(iemOp_fmulp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fmulp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fmul_r80_by_r80);
}
15527
15528
15529/** Opcode 0xde 0xd9. */
15530FNIEMOP_DEF(iemOp_fcompp)
15531{
15532 IEMOP_MNEMONIC("fucompp st0,stN");
15533 return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fcom_r80_by_r80);
15534}
15535
15536
/** Opcode 0xde 11/4.
 * FSUBRP ST(i),ST(0) - reversed subtract and pop. */
FNIEMOP_DEF_1(iemOp_fsubrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsubr_r80_by_r80);
}
15543
15544
/** Opcode 0xde 11/5.
 * FSUBP ST(i),ST(0) - subtract and pop. */
FNIEMOP_DEF_1(iemOp_fsubp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsub_r80_by_r80);
}
15551
15552
/** Opcode 0xde 11/6.
 * FDIVRP ST(i),ST(0) - reversed divide and pop. */
FNIEMOP_DEF_1(iemOp_fdivrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdivr_r80_by_r80);
}
15559
15560
/** Opcode 0xde 11/7.
 * FDIVP ST(i),ST(0) - divide and pop. */
FNIEMOP_DEF_1(iemOp_fdivp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdiv_r80_by_r80);
}
15567
15568
/**
 * Common worker for FPU instructions working on ST0 and an m16i, and storing
 * the result in ST0.
 *
 * @param   bRm         The ModR/M byte; the mod/rm fields encode the memory operand.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m16i, uint8_t, bRm, PFNIEMAIMPLFPUI16, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(int16_t,               i16Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes,  0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,          1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* Result (and underflow handling) targets ST(0). */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi16Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15604
15605
/** Opcode 0xde !11/0.
 * FIADD m16int - adds a 16-bit integer memory operand to ST(0). */
FNIEMOP_DEF_1(iemOp_fiadd_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fiadd m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fiadd_r80_by_i16);
}
15612
15613
/** Opcode 0xde !11/1.
 * FIMUL m16int - multiplies ST(0) by a 16-bit integer memory operand. */
FNIEMOP_DEF_1(iemOp_fimul_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fimul m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fimul_r80_by_i16);
}
15620
15621
/** Opcode 0xde !11/2.
 * FICOM ST(0),m16int - compares ST(0) with a 16-bit integer memory operand,
 * updating only FSW (no result stored, no pop). */
FNIEMOP_DEF_1(iemOp_ficom_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ficom st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(int16_t,               i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,  0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,          1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        /* UINT8_MAX: underflow does not target a stack register here. */
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15654
15655
/** Opcode 0xde !11/3.
 * FICOMP ST(0),m16int - like FICOM m16int (same assembly worker) but pops
 * the register stack after updating FSW. */
FNIEMOP_DEF_1(iemOp_ficomp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ficomp st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(int16_t,               i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,  0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,          1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        /* UINT8_MAX: underflow does not target a stack register here. */
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15688
15689
/** Opcode 0xde !11/4.
 * FISUB m16int - subtracts a 16-bit integer memory operand from ST(0). */
FNIEMOP_DEF_1(iemOp_fisub_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisub m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisub_r80_by_i16);
}
15696
15697
/** Opcode 0xde !11/5.
 * FISUBR m16int - reversed subtract with a 16-bit integer memory operand. */
FNIEMOP_DEF_1(iemOp_fisubr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisubr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisubr_r80_by_i16);
}
15704
15705
15706/** Opcode 0xde !11/6. */
15707FNIEMOP_DEF_1(iemOp_fidiv_m16i, uint8_t, bRm)
15708{
15709 IEMOP_MNEMONIC("fiadd m16i");
15710 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidiv_r80_by_i16);
15711}
15712
15713
15714/** Opcode 0xde !11/7. */
15715FNIEMOP_DEF_1(iemOp_fidivr_m16i, uint8_t, bRm)
15716{
15717 IEMOP_MNEMONIC("fiadd m16i");
15718 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidivr_r80_by_i16);
15719}
15720
15721
/** Opcode 0xde.
 * Escape opcode 0xde decoder: register forms are the pop-variants of the
 * ST(i),ST(0) arithmetic instructions plus FCOMPP (0xd9); memory forms are
 * the m16int arithmetic/compare instructions. */
FNIEMOP_DEF(iemOp_EscF6)
{
    /* Record the FPU opcode offset (points at the escape byte just fetched). */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_faddp_stN_st0, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmulp_stN_st0, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 3: if (bRm == 0xd9)
                        return FNIEMOP_CALL(iemOp_fcompp);
                    return IEMOP_RAISE_INVALID_OPCODE();
            case 4: return FNIEMOP_CALL_1(iemOp_fsubrp_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubp_stN_st0, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivrp_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivp_stN_st0, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m16i,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m16i,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m16i,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m16i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m16i,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m16i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m16i,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m16i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
15760
15761
/** Opcode 0xdf 11/0.
 * FFREEP ST(i) - undocumented instruction, assumed to work like
 * FFREE + FINCSTP: frees the register's tag and increments TOP. */
FNIEMOP_DEF_1(iemOp_ffreep_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ffreep stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
15783
15784
/** Opcode 0xdf 0xe0.
 * FNSTSW AX - stores the FPU status word in AX (no-wait form; only \#NM
 * is checked, no pending FPU exception check). */
FNIEMOP_DEF(iemOp_fnstsw_ax)
{
    IEMOP_MNEMONIC("fnstsw ax");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
15800
15801
15802/** Opcode 0xdf 11/5. */
15803FNIEMOP_DEF_1(iemOp_fucomip_st0_stN, uint8_t, bRm)
15804{
15805 IEMOP_MNEMONIC("fcomip st0,stN");
15806 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
15807}
15808
15809
/** Opcode 0xdf 11/6.
 * FCOMIP ST(0),ST(i) - ordered compare setting EFLAGS, then pop. */
FNIEMOP_DEF_1(iemOp_fcomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcomip st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
}
15816
15817
/** Opcode 0xdf !11/0.
 * FILD m16int - converts a 16-bit signed integer from memory to 80-bit real
 * and pushes it onto the register stack; pushing requires ST(7) to be empty. */
FNIEMOP_DEF_1(iemOp_fild_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fild m16i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(int16_t,               i16Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val,  i16Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* Register 7 (relative to TOP) must be free for the push. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i16_to_r80, pFpuRes, pi16Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15849
15850
/** Opcode 0xdf !11/1.
 * FISTTP m16int - store ST(0) to memory as a 16-bit integer using
 * truncation (round toward zero regardless of RC), then pop. */
FNIEMOP_DEF_1(iemOp_fisttp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisttp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing before looking at the stack so a bad
       address faults first. */
    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: with IM masked, store the integer indefinite. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15885
15886
15887/** Opcode 0xdf !11/2. */
15888FNIEMOP_DEF_1(iemOp_fist_m16i, uint8_t, bRm)
15889{
15890 IEMOP_MNEMONIC("fistp m16i");
15891 IEM_MC_BEGIN(3, 2);
15892 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
15893 IEM_MC_LOCAL(uint16_t, u16Fsw);
15894 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
15895 IEM_MC_ARG(int16_t *, pi16Dst, 1);
15896 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
15897
15898 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
15899 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15900 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15901 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15902
15903 IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
15904 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
15905 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
15906 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
15907 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
15908 IEM_MC_ELSE()
15909 IEM_MC_IF_FCW_IM()
15910 IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
15911 IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
15912 IEM_MC_ENDIF();
15913 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
15914 IEM_MC_ENDIF();
15915 IEM_MC_USED_FPU();
15916 IEM_MC_ADVANCE_RIP();
15917
15918 IEM_MC_END();
15919 return VINF_SUCCESS;
15920}
15921
15922
/** Opcode 0xdf !11/3.
 * FISTP m16int - store ST(0) to memory as a 16-bit integer using the
 * current rounding mode, then pop. */
FNIEMOP_DEF_1(iemOp_fistp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fistp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing before looking at the stack so a bad
       address faults first. */
    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: with IM masked, store the integer indefinite. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15957
15958
/** Opcode 0xdf !11/4.
 * FBLD m80bcd - load an 80-bit packed BCD value.  Not implemented (stub). */
FNIEMOP_STUB_1(iemOp_fbld_m80d, uint8_t, bRm);
15961
15962
/** Opcode 0xdf !11/5.
 * FILD m64int - convert a signed 64-bit integer from memory to an 80-bit
 * real and push it onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fild_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fild m64i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int64_t, i64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int64_t const *, pi64Val, i64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I64(i64Val, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* A push needs ST(7) (the register that becomes the new ST(0)) to be
       free; otherwise this is an FPU stack overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i64_to_r80, pFpuRes, pi64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15994
15995
/** Opcode 0xdf !11/6.
 * FBSTP m80bcd - store ST(0) as 80-bit packed BCD and pop.  Not
 * implemented (stub). */
FNIEMOP_STUB_1(iemOp_fbstp_m80d, uint8_t, bRm);
15998
15999
/** Opcode 0xdf !11/7.
 * FISTP m64int - store ST(0) to memory as a 64-bit integer using the
 * current rounding mode, then pop. */
FNIEMOP_DEF_1(iemOp_fistp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fistp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing before looking at the stack so a bad
       address faults first. */
    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: with IM masked, store the integer indefinite. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
16034
16035
/** Opcode 0xdf.
 * Escape opcode 7 - dispatches on the ModR/M byte: register forms
 * (mod == 3) by reg field, memory forms by reg field. */
FNIEMOP_DEF(iemOp_EscF7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register forms. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffreep_stN, bRm); /* ffree + pop afterwards, since forever according to AMD. */
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, behaves like FXCH ST(i) on intel. */
            case 2: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
            case 4: if (bRm == 0xe0)  /* only 0xdf 0xe0 is FNSTSW AX; other rm values are invalid */
                        return FNIEMOP_CALL(iemOp_fnstsw_ax);
                    return IEMOP_RAISE_INVALID_OPCODE();
            case 5: return FNIEMOP_CALL_1(iemOp_fucomip_st0_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fcomip_st0_stN, bRm);
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory forms. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fild_m16i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m16i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fist_m16i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fistp_m16i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fbld_m80d, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fild_m64i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fbstp_m80d, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fistp_m64i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
16073
16074
/** Opcode 0xe0.
 * LOOPNE rel8 - decrement the counter register (CX/ECX/RCX per the
 * effective address size) and branch while it is non-zero AND ZF is clear. */
FNIEMOP_DEF(iemOp_loopne_Jb)
{
    IEMOP_MNEMONIC("loopne Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* The address size prefix selects which counter register is used. */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16121
16122
/** Opcode 0xe1.
 * LOOPE rel8 - decrement the counter register (CX/ECX/RCX per the effective
 * address size) and branch while it is non-zero AND ZF is set. */
FNIEMOP_DEF(iemOp_loope_Jb)
{
    IEMOP_MNEMONIC("loope Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* The address size prefix selects which counter register is used. */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16169
16170
/** Opcode 0xe2.
 * LOOP rel8 - decrement the counter register (CX/ECX/RCX per the effective
 * address size) and branch while it is non-zero.
 *
 * A 'loop $' that targets its own first byte (i8Imm == -instruction length)
 * would merely spin until the counter hits zero, so that case is
 * short-circuited by clearing the counter and falling through. */
FNIEMOP_DEF(iemOp_loop_Jb)
{
    IEMOP_MNEMONIC("loop Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /** @todo Check out the #GP case if EIP < CS.Base or EIP > CS.Limit when
     * using the 32-bit operand size override.  How can that be restarted?  See
     * weird pseudo code in intel manual. */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            /* Not a self-branch ('loop $'): do the real decrement-and-test. */
            if (-(int8_t)pIemCpu->offOpcode != i8Imm)
            {
                IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
                IEM_MC_IF_CX_IS_NZ() {
                    IEM_MC_REL_JMP_S8(i8Imm);
                } IEM_MC_ELSE() {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ENDIF();
            }
            else
            {
                /* 'loop $' - skip the spin by zeroing the counter. */
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP();
            }
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            if (-(int8_t)pIemCpu->offOpcode != i8Imm)
            {
                IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
                IEM_MC_IF_ECX_IS_NZ() {
                    IEM_MC_REL_JMP_S8(i8Imm);
                } IEM_MC_ELSE() {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ENDIF();
            }
            else
            {
                /* 'loop $' - skip the spin by zeroing the counter. */
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP();
            }
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            if (-(int8_t)pIemCpu->offOpcode != i8Imm)
            {
                IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
                IEM_MC_IF_RCX_IS_NZ() {
                    IEM_MC_REL_JMP_S8(i8Imm);
                } IEM_MC_ELSE() {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ENDIF();
            }
            else
            {
                /* 'loop $' - skip the spin by zeroing the counter. */
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP();
            }
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16244
16245
/** Opcode 0xe3.
 * JCXZ/JECXZ/JRCXZ rel8 - jump if the counter register (selected by the
 * effective address size) is zero.  Note the inverted macro test: the
 * "non-zero" branch just advances RIP, the else branch takes the jump. */
FNIEMOP_DEF(iemOp_jecxz_Jb)
{
    IEMOP_MNEMONIC("jecxz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_CX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_ECX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_RCX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16289
16290
16291/** Opcode 0xe4 */
16292FNIEMOP_DEF(iemOp_in_AL_Ib)
16293{
16294 IEMOP_MNEMONIC("in eAX,Ib");
16295 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
16296 IEMOP_HLP_NO_LOCK_PREFIX();
16297 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, 1);
16298}
16299
16300
/** Opcode 0xe5.
 * IN eAX,imm8 - read a word or dword (per effective operand size) from the
 * immediate I/O port into AX/EAX. */
FNIEMOP_DEF(iemOp_in_eAX_Ib)
{
    IEMOP_MNEMONIC("in eAX,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, pIemCpu->enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
16309
16310
/** Opcode 0xe6.
 * OUT imm8,AL - write AL to the immediate I/O port. */
FNIEMOP_DEF(iemOp_out_Ib_AL)
{
    IEMOP_MNEMONIC("out Ib,AL");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, 1);
}
16319
16320
/** Opcode 0xe7.
 * OUT imm8,eAX - write AX/EAX (per effective operand size) to the immediate
 * I/O port. */
FNIEMOP_DEF(iemOp_out_Ib_eAX)
{
    IEMOP_MNEMONIC("out Ib,eAX");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, pIemCpu->enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
16329
16330
/** Opcode 0xe8.
 * CALL rel16/rel32 - near relative call.  The immediate is sign-extended;
 * in 64-bit mode the displacement is a sign-extended 32-bit value (the
 * default operand size is forced to 64-bit). */
FNIEMOP_DEF(iemOp_call_Jv)
{
    IEMOP_MNEMONIC("call Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_16, (int16_t)u16Imm);
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_32, (int32_t)u32Imm);
        }

        case IEMMODE_64BIT:
        {
            /* imm32, sign-extended to 64 bits. */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_64, u64Imm);
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16359
16360
/** Opcode 0xe9.
 * JMP rel16/rel32 - near relative jump.  64-bit mode shares the 32-bit
 * immediate path (rel32, sign-extended). */
FNIEMOP_DEF(iemOp_jmp_Jv)
{
    IEMOP_MNEMONIC("jmp Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
            IEM_MC_BEGIN(0, 0);
            IEM_MC_REL_JMP_S16(i16Imm);
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        case IEMMODE_32BIT:
        {
            int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
            IEM_MC_BEGIN(0, 0);
            IEM_MC_REL_JMP_S32(i32Imm);
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16390
16391
/** Opcode 0xea.
 * JMP ptr16:16/ptr16:32 - direct far jump.  Invalid in 64-bit mode. */
FNIEMOP_DEF(iemOp_jmp_Ap)
{
    IEMOP_MNEMONIC("jmp Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    uint32_t offSeg;
    if (pIemCpu->enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&offSeg);
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
    uint16_t uSel;  IEM_OPCODE_GET_NEXT_U16(&uSel);
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_FarJmp, uSel, offSeg, pIemCpu->enmEffOpSize);
}
16408
16409
/** Opcode 0xeb.
 * JMP rel8 - short relative jump. */
FNIEMOP_DEF(iemOp_jmp_Jb)
{
    IEMOP_MNEMONIC("jmp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_REL_JMP_S8(i8Imm);
    IEM_MC_END();
    return VINF_SUCCESS;
}
16423
16424
/** Opcode 0xec.
 * IN AL,DX - read one byte from the I/O port in DX into AL. */
FNIEMOP_DEF(iemOp_in_AL_DX)
{
    IEMOP_MNEMONIC("in AL,DX");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, 1);
}
16432
16433
/** Opcode 0xed.
 * IN eAX,DX - read a word or dword (per effective operand size) from the
 * I/O port in DX into AX/EAX.
 * NOTE(review): the function name lacks the 'in_' prefix its siblings have
 * (cf. iemOp_in_AL_DX); renaming would require touching the opcode table. */
FNIEMOP_DEF(iemOp_eAX_DX)
{
    IEMOP_MNEMONIC("in eAX,DX");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, pIemCpu->enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
16441
16442
/** Opcode 0xee.
 * OUT DX,AL - write AL to the I/O port in DX. */
FNIEMOP_DEF(iemOp_out_DX_AL)
{
    IEMOP_MNEMONIC("out DX,AL");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, 1);
}
16450
16451
/** Opcode 0xef.
 * OUT DX,eAX - write AX/EAX (per effective operand size) to the I/O port
 * in DX. */
FNIEMOP_DEF(iemOp_out_DX_eAX)
{
    IEMOP_MNEMONIC("out DX,eAX");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, pIemCpu->enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
16459
16460
/** Opcode 0xf0.
 * LOCK prefix - record it in the prefix flags and decode the next opcode
 * byte; the prefixed instruction enforces/raises \#UD itself. */
FNIEMOP_DEF(iemOp_lock)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("lock");
    pIemCpu->fPrefixes |= IEM_OP_PRF_LOCK;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
16470
16471
/** Opcode 0xf1.
 * INT1/ICEBP - raise a \#DB as a software interrupt (not flagged as a
 * breakpoint instruction). */
FNIEMOP_DEF(iemOp_int_1)
{
    IEMOP_MNEMONIC("int1"); /* icebp */
    IEMOP_HLP_MIN_386(); /** @todo does not generate #UD on 286, or so they say... */
    /** @todo testcase! */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_DB, false /*fIsBpInstr*/);
}
16480
16481
/** Opcode 0xf2.
 * REPNE/REPNZ prefix - record it and decode the next opcode byte. */
FNIEMOP_DEF(iemOp_repne)
{
    /* This overrides any previous REPE prefix. */
    pIemCpu->fPrefixes &= ~IEM_OP_PRF_REPZ;
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repne");
    pIemCpu->fPrefixes |= IEM_OP_PRF_REPNZ;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
16493
16494
/** Opcode 0xf3.
 * REP/REPE/REPZ prefix - record it and decode the next opcode byte. */
FNIEMOP_DEF(iemOp_repe)
{
    /* This overrides any previous REPNE prefix. */
    pIemCpu->fPrefixes &= ~IEM_OP_PRF_REPNZ;
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repe");
    pIemCpu->fPrefixes |= IEM_OP_PRF_REPZ;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
16506
16507
/** Opcode 0xf4.
 * HLT - halt the processor; deferred to the C implementation (privilege
 * checks and the actual halting live there). */
FNIEMOP_DEF(iemOp_hlt)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_hlt);
}
16514
16515
/** Opcode 0xf5.
 * CMC - complement the carry flag; no other flags affected. */
FNIEMOP_DEF(iemOp_cmc)
{
    IEMOP_MNEMONIC("cmc");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_FLIP_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
16527
16528
/**
 * Common implementation of 'inc/dec/not/neg Eb'.
 *
 * Register operands use the plain worker; memory operands are mapped
 * read-write and dispatched to the locked worker when a LOCK prefix is
 * present.
 *
 * @returns Strict VBox status code.
 * @param   bRm     The RM byte.
 * @param   pImpl   The instruction implementation (normal + locked workers).
 */
FNIEMOP_DEF_2(iemOpCommonUnaryEb, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
{
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint32_t *, pEFlags, 1);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        /* LOCK prefix selects the atomic worker variant. */
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU8, pu8Dst, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
16572
16573
/**
 * Common implementation of 'inc/dec/not/neg Ev'.
 *
 * Register operands are delegated to iemOpCommonUnaryGReg; memory operands
 * are handled here per effective operand size, mapped read-write, with the
 * locked worker used when a LOCK prefix is present.
 *
 * @returns Strict VBox status code.
 * @param   bRm     The RM byte.
 * @param   pImpl   The instruction implementation (normal + locked workers).
 */
FNIEMOP_DEF_2(iemOpCommonUnaryEv, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
{
    /* Registers are handled by a common worker. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, pImpl, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);

    /* Memory we do here. */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 2);
            IEM_MC_ARG(uint16_t *, pu16Dst, 0);
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
            IEM_MC_FETCH_EFLAGS(EFlags);
            if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
            else
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU16, pu16Dst, pEFlags);

            IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
            IEM_MC_COMMIT_EFLAGS(EFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 2);
            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
            IEM_MC_FETCH_EFLAGS(EFlags);
            if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
            else
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU32, pu32Dst, pEFlags);

            IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
            IEM_MC_COMMIT_EFLAGS(EFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 2);
            IEM_MC_ARG(uint64_t *, pu64Dst, 0);
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
            IEM_MC_FETCH_EFLAGS(EFlags);
            if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
            else
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU64, pu64Dst, pEFlags);

            IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
            IEM_MC_COMMIT_EFLAGS(EFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16652
16653
/** Opcode 0xf6 /0.
 * TEST Eb,Ib - AND the byte operand with an immediate and update flags;
 * the destination is never written back (mapped read-only in the memory
 * form). */
FNIEMOP_DEF_1(iemOp_grp3_test_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC("test Eb,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/u8Imm, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, u8Src, 1);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        /* The trailing imm8 makes the effective address calc pass cbImm=1. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_ASSIGN(u8Src, u8Imm);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_R, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_R);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
16701
16702
/** Opcode 0xf7 /0.
 * TEST Ev,Iv - AND the word/dword/qword operand with an immediate and
 * update flags; the destination is never written back (mapped read-only in
 * the memory forms).  The 64-bit immediate is imm32 sign-extended. */
FNIEMOP_DEF_1(iemOp_grp3_test_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC("test Ev,Iv");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/u16Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/u32Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
                /* No clearing the high dword here - test doesn't write back the result. */
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/u64Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* cbImm = 2: a trailing imm16 follows the ModR/M bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_R, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* cbImm = 4: a trailing imm32 follows the ModR/M bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_R, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* cbImm = 4: 64-bit TEST still only has an imm32 (sign-extended). */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_R, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
16839
16840
/** Opcode 0xf6 /4, /5, /6 and /7. */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEb, uint8_t, bRm, PFNIEMAIMPLMULDIVU8, pfnU8)
{
    /*
     * Common worker for the byte-sized group 3 multiply/divide encodings
     * (mul/imul/div/idiv Eb).  The assembly worker pfnU8 operates on AX
     * (by reference) and the 8-bit source; a non-zero return from it is
     * taken to mean a divide error (\#DE) must be raised.
     */
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        IEMOP_HLP_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_ARG(uint16_t *, pu16AX, 0);
        IEM_MC_ARG(uint8_t, u8Value, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(int32_t, rc);

        IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        /* rc != 0 => raise #DE instead of advancing RIP. */
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *, pu16AX, 0);
        IEM_MC_ARG(uint8_t, u8Value, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(int32_t, rc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_MEM_U8(u8Value, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        /* rc != 0 => raise #DE instead of advancing RIP. */
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
16895
16896
/** Opcode 0xf7 /4, /5, /6 and /7. */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEv, uint8_t, bRm, PCIEMOPMULDIVSIZES, pImpl)
{
    /*
     * Common worker for the word/dword/qword group 3 multiply/divide
     * encodings (mul/imul/div/idiv Ev).  pImpl supplies the per-size
     * assembly workers, each taking AX/EAX/RAX and DX/EDX/RDX by
     * reference plus the source operand and EFLAGS; a non-zero return
     * signals a divide error (\#DE).
     */
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint16_t *, pu16AX, 0);
                IEM_MC_ARG(uint16_t *, pu16DX, 1);
                IEM_MC_ARG(uint16_t, u16Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint32_t *, pu32AX, 0);
                IEM_MC_ARG(uint32_t *, pu32DX, 1);
                IEM_MC_ARG(uint32_t, u32Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    /* 32-bit GPR writes clear the high halves of RAX/RDX. */
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint64_t *, pu64AX, 0);
                IEM_MC_ARG(uint64_t *, pu64DX, 1);
                IEM_MC_ARG(uint64_t, u64Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint16_t *, pu16AX, 0);
                IEM_MC_ARG(uint16_t *, pu16DX, 1);
                IEM_MC_ARG(uint16_t, u16Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint32_t *, pu32AX, 0);
                IEM_MC_ARG(uint32_t *, pu32DX, 1);
                IEM_MC_ARG(uint32_t, u32Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    /* 32-bit GPR writes clear the high halves of RAX/RDX. */
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint64_t *, pu64AX, 0);
                IEM_MC_ARG(uint64_t *, pu64DX, 1);
                IEM_MC_ARG(uint64_t, u64Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
17080
/** Opcode 0xf6. */
FNIEMOP_DEF(iemOp_Grp3_Eb)
{
    /* Group 3, byte operand: dispatch on the ModR/M reg field. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
        case 1:
/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2:
            IEMOP_MNEMONIC("not Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_not);
        case 3:
            IEMOP_MNEMONIC("neg Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_neg);
        case 4:
            IEMOP_MNEMONIC("mul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_mul_u8);
        case 5:
            IEMOP_MNEMONIC("imul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_imul_u8);
        case 6:
            IEMOP_MNEMONIC("div Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_div_u8);
        case 7:
            IEMOP_MNEMONIC("idiv Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_idiv_u8);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
17117
17118
/** Opcode 0xf7. */
FNIEMOP_DEF(iemOp_Grp3_Ev)
{
    /* Group 3, word/dword/qword operand: dispatch on the ModR/M reg field. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
        case 1:
/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2:
            IEMOP_MNEMONIC("not Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_not);
        case 3:
            IEMOP_MNEMONIC("neg Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_neg);
        case 4:
            IEMOP_MNEMONIC("mul Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_mul);
        case 5:
            IEMOP_MNEMONIC("imul Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_imul);
        case 6:
            IEMOP_MNEMONIC("div Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_div);
        case 7:
            IEMOP_MNEMONIC("idiv Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_idiv);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
17155
17156
/** Opcode 0xf8. */
FNIEMOP_DEF(iemOp_clc)
{
    /* CLC - clear the carry flag. */
    IEMOP_MNEMONIC("clc");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
17168
17169
/** Opcode 0xf9. */
FNIEMOP_DEF(iemOp_stc)
{
    /* STC - set the carry flag. */
    IEMOP_MNEMONIC("stc");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_SET_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
17181
17182
/** Opcode 0xfa. */
FNIEMOP_DEF(iemOp_cli)
{
    /* CLI - privilege checks and IF handling are done in the C implementation. */
    IEMOP_MNEMONIC("cli");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cli);
}
17190
17191
/** Opcode 0xfb. */
FNIEMOP_DEF(iemOp_sti)
{
    /* STI - privilege checks and the interrupt shadow are handled in the C implementation. */
    IEMOP_MNEMONIC("sti");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sti);
}
17198
17199
/** Opcode 0xfc. */
FNIEMOP_DEF(iemOp_cld)
{
    /* CLD - clear the direction flag. */
    IEMOP_MNEMONIC("cld");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
17211
17212
/** Opcode 0xfd. */
FNIEMOP_DEF(iemOp_std)
{
    /* STD - set the direction flag. */
    IEMOP_MNEMONIC("std");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_SET_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
17224
17225
17226/** Opcode 0xfe. */
17227FNIEMOP_DEF(iemOp_Grp4)
17228{
17229 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
17230 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
17231 {
17232 case 0:
17233 IEMOP_MNEMONIC("inc Ev");
17234 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_inc);
17235 case 1:
17236 IEMOP_MNEMONIC("dec Ev");
17237 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_dec);
17238 default:
17239 IEMOP_MNEMONIC("grp4-ud");
17240 return IEMOP_RAISE_INVALID_OPCODE();
17241 }
17242}
17243
17244
17245/**
17246 * Opcode 0xff /2.
17247 * @param bRm The RM byte.
17248 */
17249FNIEMOP_DEF_1(iemOp_Grp5_calln_Ev, uint8_t, bRm)
17250{
17251 IEMOP_MNEMONIC("calln Ev");
17252 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo Too early? */
17253 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
17254
17255 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
17256 {
17257 /* The new RIP is taken from a register. */
17258 switch (pIemCpu->enmEffOpSize)
17259 {
17260 case IEMMODE_16BIT:
17261 IEM_MC_BEGIN(1, 0);
17262 IEM_MC_ARG(uint16_t, u16Target, 0);
17263 IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
17264 IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
17265 IEM_MC_END()
17266 return VINF_SUCCESS;
17267
17268 case IEMMODE_32BIT:
17269 IEM_MC_BEGIN(1, 0);
17270 IEM_MC_ARG(uint32_t, u32Target, 0);
17271 IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
17272 IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
17273 IEM_MC_END()
17274 return VINF_SUCCESS;
17275
17276 case IEMMODE_64BIT:
17277 IEM_MC_BEGIN(1, 0);
17278 IEM_MC_ARG(uint64_t, u64Target, 0);
17279 IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
17280 IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
17281 IEM_MC_END()
17282 return VINF_SUCCESS;
17283
17284 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17285 }
17286 }
17287 else
17288 {
17289 /* The new RIP is taken from a register. */
17290 switch (pIemCpu->enmEffOpSize)
17291 {
17292 case IEMMODE_16BIT:
17293 IEM_MC_BEGIN(1, 1);
17294 IEM_MC_ARG(uint16_t, u16Target, 0);
17295 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17296 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17297 IEM_MC_FETCH_MEM_U16(u16Target, pIemCpu->iEffSeg, GCPtrEffSrc);
17298 IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
17299 IEM_MC_END()
17300 return VINF_SUCCESS;
17301
17302 case IEMMODE_32BIT:
17303 IEM_MC_BEGIN(1, 1);
17304 IEM_MC_ARG(uint32_t, u32Target, 0);
17305 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17306 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17307 IEM_MC_FETCH_MEM_U32(u32Target, pIemCpu->iEffSeg, GCPtrEffSrc);
17308 IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
17309 IEM_MC_END()
17310 return VINF_SUCCESS;
17311
17312 case IEMMODE_64BIT:
17313 IEM_MC_BEGIN(1, 1);
17314 IEM_MC_ARG(uint64_t, u64Target, 0);
17315 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17316 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17317 IEM_MC_FETCH_MEM_U64(u64Target, pIemCpu->iEffSeg, GCPtrEffSrc);
17318 IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
17319 IEM_MC_END()
17320 return VINF_SUCCESS;
17321
17322 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17323 }
17324 }
17325}
17326
17327typedef IEM_CIMPL_DECL_TYPE_3(FNIEMCIMPLFARBRANCH, uint16_t, uSel, uint64_t, offSeg, IEMMODE, enmOpSize);
17328
17329FNIEMOP_DEF_2(iemOpHlp_Grp5_far_Ep, uint8_t, bRm, FNIEMCIMPLFARBRANCH *, pfnCImpl)
17330{
17331 /* Registers? How?? */
17332 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
17333 return IEMOP_RAISE_INVALID_OPCODE(); /* callf eax is not legal */
17334
17335 /* Far pointer loaded from memory. */
17336 switch (pIemCpu->enmEffOpSize)
17337 {
17338 case IEMMODE_16BIT:
17339 IEM_MC_BEGIN(3, 1);
17340 IEM_MC_ARG(uint16_t, u16Sel, 0);
17341 IEM_MC_ARG(uint16_t, offSeg, 1);
17342 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
17343 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17344 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17345 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17346 IEM_MC_FETCH_MEM_U16(offSeg, pIemCpu->iEffSeg, GCPtrEffSrc);
17347 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc, 2);
17348 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
17349 IEM_MC_END();
17350 return VINF_SUCCESS;
17351
17352 case IEMMODE_64BIT:
17353 /** @todo testcase: AMD does not seem to believe in the case (see bs-cpu-xcpt-1)
17354 * and will apparently ignore REX.W, at least for the jmp far qword [rsp]
17355 * and call far qword [rsp] encodings. */
17356 if (!IEM_IS_GUEST_CPU_AMD(pIemCpu))
17357 {
17358 IEM_MC_BEGIN(3, 1);
17359 IEM_MC_ARG(uint16_t, u16Sel, 0);
17360 IEM_MC_ARG(uint64_t, offSeg, 1);
17361 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
17362 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17363 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17364 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17365 IEM_MC_FETCH_MEM_U64(offSeg, pIemCpu->iEffSeg, GCPtrEffSrc);
17366 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc, 8);
17367 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
17368 IEM_MC_END();
17369 return VINF_SUCCESS;
17370 }
17371 /* AMD falls thru. */
17372
17373 case IEMMODE_32BIT:
17374 IEM_MC_BEGIN(3, 1);
17375 IEM_MC_ARG(uint16_t, u16Sel, 0);
17376 IEM_MC_ARG(uint32_t, offSeg, 1);
17377 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_32BIT, 2);
17378 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17379 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17380 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17381 IEM_MC_FETCH_MEM_U32(offSeg, pIemCpu->iEffSeg, GCPtrEffSrc);
17382 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc, 4);
17383 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
17384 IEM_MC_END();
17385 return VINF_SUCCESS;
17386
17387 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17388 }
17389}
17390
17391
17392/**
17393 * Opcode 0xff /3.
17394 * @param bRm The RM byte.
17395 */
17396FNIEMOP_DEF_1(iemOp_Grp5_callf_Ep, uint8_t, bRm)
17397{
17398 IEMOP_MNEMONIC("callf Ep");
17399 return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_callf);
17400}
17401
17402
17403/**
17404 * Opcode 0xff /4.
17405 * @param bRm The RM byte.
17406 */
17407FNIEMOP_DEF_1(iemOp_Grp5_jmpn_Ev, uint8_t, bRm)
17408{
17409 IEMOP_MNEMONIC("jmpn Ev");
17410 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo Too early? */
17411 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
17412
17413 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
17414 {
17415 /* The new RIP is taken from a register. */
17416 switch (pIemCpu->enmEffOpSize)
17417 {
17418 case IEMMODE_16BIT:
17419 IEM_MC_BEGIN(0, 1);
17420 IEM_MC_LOCAL(uint16_t, u16Target);
17421 IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
17422 IEM_MC_SET_RIP_U16(u16Target);
17423 IEM_MC_END()
17424 return VINF_SUCCESS;
17425
17426 case IEMMODE_32BIT:
17427 IEM_MC_BEGIN(0, 1);
17428 IEM_MC_LOCAL(uint32_t, u32Target);
17429 IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
17430 IEM_MC_SET_RIP_U32(u32Target);
17431 IEM_MC_END()
17432 return VINF_SUCCESS;
17433
17434 case IEMMODE_64BIT:
17435 IEM_MC_BEGIN(0, 1);
17436 IEM_MC_LOCAL(uint64_t, u64Target);
17437 IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
17438 IEM_MC_SET_RIP_U64(u64Target);
17439 IEM_MC_END()
17440 return VINF_SUCCESS;
17441
17442 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17443 }
17444 }
17445 else
17446 {
17447 /* The new RIP is taken from a memory location. */
17448 switch (pIemCpu->enmEffOpSize)
17449 {
17450 case IEMMODE_16BIT:
17451 IEM_MC_BEGIN(0, 2);
17452 IEM_MC_LOCAL(uint16_t, u16Target);
17453 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17454 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17455 IEM_MC_FETCH_MEM_U16(u16Target, pIemCpu->iEffSeg, GCPtrEffSrc);
17456 IEM_MC_SET_RIP_U16(u16Target);
17457 IEM_MC_END()
17458 return VINF_SUCCESS;
17459
17460 case IEMMODE_32BIT:
17461 IEM_MC_BEGIN(0, 2);
17462 IEM_MC_LOCAL(uint32_t, u32Target);
17463 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17464 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17465 IEM_MC_FETCH_MEM_U32(u32Target, pIemCpu->iEffSeg, GCPtrEffSrc);
17466 IEM_MC_SET_RIP_U32(u32Target);
17467 IEM_MC_END()
17468 return VINF_SUCCESS;
17469
17470 case IEMMODE_64BIT:
17471 IEM_MC_BEGIN(0, 2);
17472 IEM_MC_LOCAL(uint64_t, u64Target);
17473 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17474 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17475 IEM_MC_FETCH_MEM_U64(u64Target, pIemCpu->iEffSeg, GCPtrEffSrc);
17476 IEM_MC_SET_RIP_U64(u64Target);
17477 IEM_MC_END()
17478 return VINF_SUCCESS;
17479
17480 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17481 }
17482 }
17483}
17484
17485
17486/**
17487 * Opcode 0xff /5.
17488 * @param bRm The RM byte.
17489 */
17490FNIEMOP_DEF_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm)
17491{
17492 IEMOP_MNEMONIC("jmpf Ep");
17493 return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_FarJmp);
17494}
17495
17496
17497/**
17498 * Opcode 0xff /6.
17499 * @param bRm The RM byte.
17500 */
17501FNIEMOP_DEF_1(iemOp_Grp5_push_Ev, uint8_t, bRm)
17502{
17503 IEMOP_MNEMONIC("push Ev");
17504 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo Too early? */
17505
17506 /* Registers are handled by a common worker. */
17507 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
17508 return FNIEMOP_CALL_1(iemOpCommonPushGReg, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
17509
17510 /* Memory we do here. */
17511 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
17512 switch (pIemCpu->enmEffOpSize)
17513 {
17514 case IEMMODE_16BIT:
17515 IEM_MC_BEGIN(0, 2);
17516 IEM_MC_LOCAL(uint16_t, u16Src);
17517 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17518 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17519 IEM_MC_FETCH_MEM_U16(u16Src, pIemCpu->iEffSeg, GCPtrEffSrc);
17520 IEM_MC_PUSH_U16(u16Src);
17521 IEM_MC_ADVANCE_RIP();
17522 IEM_MC_END();
17523 return VINF_SUCCESS;
17524
17525 case IEMMODE_32BIT:
17526 IEM_MC_BEGIN(0, 2);
17527 IEM_MC_LOCAL(uint32_t, u32Src);
17528 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17529 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17530 IEM_MC_FETCH_MEM_U32(u32Src, pIemCpu->iEffSeg, GCPtrEffSrc);
17531 IEM_MC_PUSH_U32(u32Src);
17532 IEM_MC_ADVANCE_RIP();
17533 IEM_MC_END();
17534 return VINF_SUCCESS;
17535
17536 case IEMMODE_64BIT:
17537 IEM_MC_BEGIN(0, 2);
17538 IEM_MC_LOCAL(uint64_t, u64Src);
17539 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17540 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17541 IEM_MC_FETCH_MEM_U64(u64Src, pIemCpu->iEffSeg, GCPtrEffSrc);
17542 IEM_MC_PUSH_U64(u64Src);
17543 IEM_MC_ADVANCE_RIP();
17544 IEM_MC_END();
17545 return VINF_SUCCESS;
17546
17547 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17548 }
17549}
17550
17551
/** Opcode 0xff. */
FNIEMOP_DEF(iemOp_Grp5)
{
    /* Group 5: dispatch on the ModR/M reg field (inc/dec/call/jmp/push). */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            IEMOP_MNEMONIC("inc Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_inc);
        case 1:
            IEMOP_MNEMONIC("dec Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_dec);
        case 2:
            return FNIEMOP_CALL_1(iemOp_Grp5_calln_Ev, bRm);
        case 3:
            return FNIEMOP_CALL_1(iemOp_Grp5_callf_Ep, bRm);
        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp5_jmpn_Ev, bRm);
        case 5:
            return FNIEMOP_CALL_1(iemOp_Grp5_jmpf_Ep, bRm);
        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp5_push_Ev, bRm);
        case 7:
            IEMOP_MNEMONIC("grp5-ud");
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    AssertFailedReturn(VERR_IEM_IPE_3);
}
17580
17581
17582
/**
 * The one-byte opcode dispatch table.
 *
 * Indexed directly by the opcode byte; each entry decodes and emulates
 * one primary opcode.  Group opcodes (0x80-0x83, 0xc0/0xc1, 0xd0-0xd3,
 * 0xf6/0xf7, 0xfe, 0xff) dispatch further on the ModR/M reg field, and
 * 0x0f escapes to the two-byte map.
 * NOTE(review): a few entries (0xb1-0xbf, 0xed) lack the usual mov_/in_
 * name prefix - the handlers are defined elsewhere, so the naming
 * inconsistency cannot be fixed from here.
 */
const PFNIEMOP g_apfnOneByteMap[256] =
{
    /* 0x00 */ iemOp_add_Eb_Gb, iemOp_add_Ev_Gv, iemOp_add_Gb_Eb, iemOp_add_Gv_Ev,
    /* 0x04 */ iemOp_add_Al_Ib, iemOp_add_eAX_Iz, iemOp_push_ES, iemOp_pop_ES,
    /* 0x08 */ iemOp_or_Eb_Gb, iemOp_or_Ev_Gv, iemOp_or_Gb_Eb, iemOp_or_Gv_Ev,
    /* 0x0c */ iemOp_or_Al_Ib, iemOp_or_eAX_Iz, iemOp_push_CS, iemOp_2byteEscape,
    /* 0x10 */ iemOp_adc_Eb_Gb, iemOp_adc_Ev_Gv, iemOp_adc_Gb_Eb, iemOp_adc_Gv_Ev,
    /* 0x14 */ iemOp_adc_Al_Ib, iemOp_adc_eAX_Iz, iemOp_push_SS, iemOp_pop_SS,
    /* 0x18 */ iemOp_sbb_Eb_Gb, iemOp_sbb_Ev_Gv, iemOp_sbb_Gb_Eb, iemOp_sbb_Gv_Ev,
    /* 0x1c */ iemOp_sbb_Al_Ib, iemOp_sbb_eAX_Iz, iemOp_push_DS, iemOp_pop_DS,
    /* 0x20 */ iemOp_and_Eb_Gb, iemOp_and_Ev_Gv, iemOp_and_Gb_Eb, iemOp_and_Gv_Ev,
    /* 0x24 */ iemOp_and_Al_Ib, iemOp_and_eAX_Iz, iemOp_seg_ES, iemOp_daa,
    /* 0x28 */ iemOp_sub_Eb_Gb, iemOp_sub_Ev_Gv, iemOp_sub_Gb_Eb, iemOp_sub_Gv_Ev,
    /* 0x2c */ iemOp_sub_Al_Ib, iemOp_sub_eAX_Iz, iemOp_seg_CS, iemOp_das,
    /* 0x30 */ iemOp_xor_Eb_Gb, iemOp_xor_Ev_Gv, iemOp_xor_Gb_Eb, iemOp_xor_Gv_Ev,
    /* 0x34 */ iemOp_xor_Al_Ib, iemOp_xor_eAX_Iz, iemOp_seg_SS, iemOp_aaa,
    /* 0x38 */ iemOp_cmp_Eb_Gb, iemOp_cmp_Ev_Gv, iemOp_cmp_Gb_Eb, iemOp_cmp_Gv_Ev,
    /* 0x3c */ iemOp_cmp_Al_Ib, iemOp_cmp_eAX_Iz, iemOp_seg_DS, iemOp_aas,
    /* 0x40 */ iemOp_inc_eAX, iemOp_inc_eCX, iemOp_inc_eDX, iemOp_inc_eBX,
    /* 0x44 */ iemOp_inc_eSP, iemOp_inc_eBP, iemOp_inc_eSI, iemOp_inc_eDI,
    /* 0x48 */ iemOp_dec_eAX, iemOp_dec_eCX, iemOp_dec_eDX, iemOp_dec_eBX,
    /* 0x4c */ iemOp_dec_eSP, iemOp_dec_eBP, iemOp_dec_eSI, iemOp_dec_eDI,
    /* 0x50 */ iemOp_push_eAX, iemOp_push_eCX, iemOp_push_eDX, iemOp_push_eBX,
    /* 0x54 */ iemOp_push_eSP, iemOp_push_eBP, iemOp_push_eSI, iemOp_push_eDI,
    /* 0x58 */ iemOp_pop_eAX, iemOp_pop_eCX, iemOp_pop_eDX, iemOp_pop_eBX,
    /* 0x5c */ iemOp_pop_eSP, iemOp_pop_eBP, iemOp_pop_eSI, iemOp_pop_eDI,
    /* 0x60 */ iemOp_pusha, iemOp_popa, iemOp_bound_Gv_Ma_evex, iemOp_arpl_Ew_Gw_movsx_Gv_Ev,
    /* 0x64 */ iemOp_seg_FS, iemOp_seg_GS, iemOp_op_size, iemOp_addr_size,
    /* 0x68 */ iemOp_push_Iz, iemOp_imul_Gv_Ev_Iz, iemOp_push_Ib, iemOp_imul_Gv_Ev_Ib,
    /* 0x6c */ iemOp_insb_Yb_DX, iemOp_inswd_Yv_DX, iemOp_outsb_Yb_DX, iemOp_outswd_Yv_DX,
    /* 0x70 */ iemOp_jo_Jb, iemOp_jno_Jb, iemOp_jc_Jb, iemOp_jnc_Jb,
    /* 0x74 */ iemOp_je_Jb, iemOp_jne_Jb, iemOp_jbe_Jb, iemOp_jnbe_Jb,
    /* 0x78 */ iemOp_js_Jb, iemOp_jns_Jb, iemOp_jp_Jb, iemOp_jnp_Jb,
    /* 0x7c */ iemOp_jl_Jb, iemOp_jnl_Jb, iemOp_jle_Jb, iemOp_jnle_Jb,
    /* 0x80 */ iemOp_Grp1_Eb_Ib_80, iemOp_Grp1_Ev_Iz, iemOp_Grp1_Eb_Ib_82, iemOp_Grp1_Ev_Ib,
    /* 0x84 */ iemOp_test_Eb_Gb, iemOp_test_Ev_Gv, iemOp_xchg_Eb_Gb, iemOp_xchg_Ev_Gv,
    /* 0x88 */ iemOp_mov_Eb_Gb, iemOp_mov_Ev_Gv, iemOp_mov_Gb_Eb, iemOp_mov_Gv_Ev,
    /* 0x8c */ iemOp_mov_Ev_Sw, iemOp_lea_Gv_M, iemOp_mov_Sw_Ev, iemOp_Grp1A,
    /* 0x90 */ iemOp_nop, iemOp_xchg_eCX_eAX, iemOp_xchg_eDX_eAX, iemOp_xchg_eBX_eAX,
    /* 0x94 */ iemOp_xchg_eSP_eAX, iemOp_xchg_eBP_eAX, iemOp_xchg_eSI_eAX, iemOp_xchg_eDI_eAX,
    /* 0x98 */ iemOp_cbw, iemOp_cwd, iemOp_call_Ap, iemOp_wait,
    /* 0x9c */ iemOp_pushf_Fv, iemOp_popf_Fv, iemOp_sahf, iemOp_lahf,
    /* 0xa0 */ iemOp_mov_Al_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL, iemOp_mov_Ov_rAX,
    /* 0xa4 */ iemOp_movsb_Xb_Yb, iemOp_movswd_Xv_Yv, iemOp_cmpsb_Xb_Yb, iemOp_cmpswd_Xv_Yv,
    /* 0xa8 */ iemOp_test_AL_Ib, iemOp_test_eAX_Iz, iemOp_stosb_Yb_AL, iemOp_stoswd_Yv_eAX,
    /* 0xac */ iemOp_lodsb_AL_Xb, iemOp_lodswd_eAX_Xv, iemOp_scasb_AL_Xb, iemOp_scaswd_eAX_Xv,
    /* 0xb0 */ iemOp_mov_AL_Ib, iemOp_CL_Ib, iemOp_DL_Ib, iemOp_BL_Ib,
    /* 0xb4 */ iemOp_mov_AH_Ib, iemOp_CH_Ib, iemOp_DH_Ib, iemOp_BH_Ib,
    /* 0xb8 */ iemOp_eAX_Iv, iemOp_eCX_Iv, iemOp_eDX_Iv, iemOp_eBX_Iv,
    /* 0xbc */ iemOp_eSP_Iv, iemOp_eBP_Iv, iemOp_eSI_Iv, iemOp_eDI_Iv,
    /* 0xc0 */ iemOp_Grp2_Eb_Ib, iemOp_Grp2_Ev_Ib, iemOp_retn_Iw, iemOp_retn,
    /* 0xc4 */ iemOp_les_Gv_Mp_vex2, iemOp_lds_Gv_Mp_vex3, iemOp_Grp11_Eb_Ib, iemOp_Grp11_Ev_Iz,
    /* 0xc8 */ iemOp_enter_Iw_Ib, iemOp_leave, iemOp_retf_Iw, iemOp_retf,
    /* 0xcc */ iemOp_int_3, iemOp_int_Ib, iemOp_into, iemOp_iret,
    /* 0xd0 */ iemOp_Grp2_Eb_1, iemOp_Grp2_Ev_1, iemOp_Grp2_Eb_CL, iemOp_Grp2_Ev_CL,
    /* 0xd4 */ iemOp_aam_Ib, iemOp_aad_Ib, iemOp_salc, iemOp_xlat,
    /* 0xd8 */ iemOp_EscF0, iemOp_EscF1, iemOp_EscF2, iemOp_EscF3,
    /* 0xdc */ iemOp_EscF4, iemOp_EscF5, iemOp_EscF6, iemOp_EscF7,
    /* 0xe0 */ iemOp_loopne_Jb, iemOp_loope_Jb, iemOp_loop_Jb, iemOp_jecxz_Jb,
    /* 0xe4 */ iemOp_in_AL_Ib, iemOp_in_eAX_Ib, iemOp_out_Ib_AL, iemOp_out_Ib_eAX,
    /* 0xe8 */ iemOp_call_Jv, iemOp_jmp_Jv, iemOp_jmp_Ap, iemOp_jmp_Jb,
    /* 0xec */ iemOp_in_AL_DX, iemOp_eAX_DX, iemOp_out_DX_AL, iemOp_out_DX_eAX,
    /* 0xf0 */ iemOp_lock, iemOp_int_1, iemOp_repne, iemOp_repe,
    /* 0xf4 */ iemOp_hlt, iemOp_cmc, iemOp_Grp3_Eb, iemOp_Grp3_Ev,
    /* 0xf8 */ iemOp_clc, iemOp_stc, iemOp_cli, iemOp_sti,
    /* 0xfc */ iemOp_cld, iemOp_std, iemOp_Grp4, iemOp_Grp5,
};
17650
17651
17652/** @} */
17653
Note: See TracBrowser for help on using the repository browser.

© 2025 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette