VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructions.cpp.h@ 60384

Last change on this file since 60384 was 60384, checked in by vboxsync, 9 years ago

IEM: Marked instructions introduced by the 186, 286, 386 and 486 to speed up debugging bs3kit on the 286.

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 594.1 KB
Line 
1/* $Id: IEMAllInstructions.cpp.h 60384 2016-04-08 00:16:58Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 */
5
6/*
7 * Copyright (C) 2011-2015 Oracle Corporation
8 *
9 * This file is part of VirtualBox Open Source Edition (OSE), as
10 * available from http://www.virtualbox.org. This file is free software;
11 * you can redistribute it and/or modify it under the terms of the GNU
12 * General Public License (GPL) as published by the Free Software
13 * Foundation, in version 2 as it comes in the "COPYING" file of the
14 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
15 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
16 */
17
18
19/*******************************************************************************
20* Global Variables *
21*******************************************************************************/
/** The one-byte opcode dispatch table; defined elsewhere in this translation
 *  unit, declared here (non-static) so it can be forward referenced. */
extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */
23
24
/**
 * Common worker for instructions like ADD, AND, OR, ++ with a byte
 * memory/register as the destination (Eb,Gb encoding).
 *
 * Decodes the ModR/M byte and splits on the mod field: a register
 * destination takes the short path; a memory destination is mapped so the
 * result (and EFLAGS) can be committed after the arithmetic helper runs.
 *
 * @param   pImpl   Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rm_r8, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX(); /* LOCK with a register destination is invalid. */

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0);
        IEM_MC_ARG(uint8_t,    u8Src,   1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         * Note! We're putting the eflags on the stack here so we can commit them
         *       after the memory.
         */
        /* Implementations without a locked byte helper (the comment tags these as
           CMP/TEST) never write the destination, so map it read-only. */
        uint32_t const fAccess = pImpl->pfnLockedU8 ? IEM_ACCESS_DATA_RW : IEM_ACCESS_DATA_R; /* CMP,TEST */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,  pu8Dst,           0);
        IEM_MC_ARG(uint8_t,    u8Src,            1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_FETCH_EFLAGS(EFlags);
        /* With a LOCK prefix the locked (atomic) helper variant is used. */
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
85
86
/**
 * Common worker for word/dword/qword instructions like ADD, AND, OR, ++ with
 * memory/register as the destination (Ev,Gv encoding).
 *
 * Same structure as the byte worker, but dispatches on the effective operand
 * size (16/32/64-bit) inside each mod branch.
 *
 * @param   pImpl   Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rm_rv, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX(); /* LOCK with a register destination is invalid. */

        /* NOTE(review): no default case here; presumably enmEffOpSize is always
           one of the three handled modes at this point — confirm. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t,   u16Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t,   u32Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                /* TEST is excluded from the high-dword clearing since it does not
                   write its destination register. */
                if (pImpl != &g_iemAImpl_test)
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t,   u64Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're accessing memory.
         * Note! We're putting the eflags on the stack here so we can commit them
         *       after the memory.
         */
        /* pfnLockedU8 is used as the "has locked variants" probe for all sizes
           (same tagging as the byte worker: CMP/TEST map read-only). */
        uint32_t const fAccess = pImpl->pfnLockedU8 ? IEM_ACCESS_DATA_RW : IEM_ACCESS_DATA_R /* CMP,TEST */;
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst,          0);
                IEM_MC_ARG(uint16_t,   u16Src,           1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst,          0);
                IEM_MC_ARG(uint32_t,   u32Src,           1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst,          0);
                IEM_MC_ARG(uint64_t,   u64Src,           1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
233
234
/**
 * Common worker for byte instructions like ADD, AND, OR, ++ with a register as
 * the destination (Gb,Eb encoding).
 *
 * The destination is always a register, so no memory mapping/commit is needed;
 * a memory source is simply fetched by value.
 *
 * @param   pImpl   Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_r8_rm, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0);
        IEM_MC_ARG(uint8_t,    u8Src,   1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        /* Source is rm, destination is reg — the mirror of the rm_r8 worker. */
        IEM_MC_FETCH_GREG_U8(u8Src, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_GREG_U8(pu8Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
        IEM_MC_BEGIN(3, 1);
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0);
        IEM_MC_ARG(uint8_t,    u8Src,   1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_MEM_U8(u8Src, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U8(pu8Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
286
287
/**
 * Common worker for word/dword/qword instructions like ADD, AND, OR, ++ with a
 * register as the destination (Gv,Ev encoding).
 *
 * Register destination only, so a memory source is fetched by value — no
 * mapping or locked variants are involved.
 *
 * @param   pImpl   Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rv_rm, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t,   u16Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U16(u16Src, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t,   u32Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U32(u32Src, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U32(pu32Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                /* Unconditional here (unlike the rm_rv worker's TEST check) —
                   presumably TEST never routes through this Gv,Ev worker. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t,   u64Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U64(u64Src, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t,   u16Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U16(u16Src, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t,   u32Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U32(u32Src, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U32(pu32Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t,   u64Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U64(u64Src, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
415
416
/**
 * Common worker for instructions like ADD, AND, OR, ++ working on AL with
 * a byte immediate (AL,Ib encoding).
 *
 * @param   pImpl   Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_AL_Ib, PCIEMOPBINSIZES, pImpl)
{
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
    IEM_MC_ARG_CONST(uint8_t,   u8Src,/*=*/ u8Imm,  1);
    IEM_MC_ARG(uint32_t *,      pEFlags,            2);

    /* Destination is fixed: the AL register. */
    IEM_MC_REF_GREG_U8(pu8Dst, X86_GREG_xAX);
    IEM_MC_REF_EFLAGS(pEFlags);
    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
441
442
/**
 * Common worker for instructions like ADD, AND, OR, ++ working on
 * AX/EAX/RAX with a word/dword immediate (rAX,Iz encoding).
 *
 * In 64-bit mode the immediate is a dword sign-extended to 64 bits, per the
 * Iz operand form.
 *
 * @param   pImpl   Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rAX_Iz, PCIEMOPBINSIZES, pImpl)
{
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint16_t *,      pu16Dst,                0);
            IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/ u16Imm,    1);
            IEM_MC_ARG(uint32_t *,      pEFlags,                2);

            IEM_MC_REF_GREG_U16(pu16Dst, X86_GREG_xAX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint32_t *,      pu32Dst,                0);
            IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/ u32Imm,    1);
            IEM_MC_ARG(uint32_t *,      pEFlags,                2);

            IEM_MC_REF_GREG_U32(pu32Dst, X86_GREG_xAX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

            /* TEST does not write EAX, so skip the high-dword clearing for it. */
            if (pImpl != &g_iemAImpl_test)
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            /* Sign-extend the 32-bit immediate to 64 bits (Iz). */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint64_t *,      pu64Dst,                0);
            IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/ u64Imm,    1);
            IEM_MC_ARG(uint32_t *,      pEFlags,                2);

            IEM_MC_REF_GREG_U64(pu64Dst, X86_GREG_xAX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
515
516
/** Opcodes 0xf1, 0xd6 — always-invalid opcodes; decoding one raises \#UD. */
FNIEMOP_DEF(iemOp_Invalid)
{
    IEMOP_MNEMONIC("Invalid");
    return IEMOP_RAISE_INVALID_OPCODE();
}
523
524
525
526/** @name ..... opcodes.
527 *
528 * @{
529 */
530
531/** @} */
532
533
534/** @name Two byte opcodes (first byte 0x0f).
535 *
536 * @{
537 */
538
/** Opcode 0x0f 0x00 /0 — SLDT: store the LDTR selector to a register
 *  (operand-size wide) or to a 16-bit memory word. 286+; not valid in
 *  real or V8086 mode. */
FNIEMOP_DEF_1(iemOp_Grp6_sldt, uint8_t, bRm)
{
    IEMOP_MNEMONIC("sldt Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        /* Register destination: the store width follows the operand size. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Ldtr);
                IEM_MC_FETCH_LDTR_U16(u16Ldtr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Ldtr);
                IEM_MC_FETCH_LDTR_U32(u32Ldtr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Ldtr);
                IEM_MC_FETCH_LDTR_U64(u64Ldtr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory destination: always a 16-bit store regardless of operand size. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Ldtr);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_LDTR_U16(u16Ldtr);
        IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Ldtr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
595
596
/** Opcode 0x0f 0x00 /1 — STR: store the task register selector to a register
 *  (operand-size wide) or a 16-bit memory word. 286+; not valid in real or
 *  V8086 mode. Mirrors the SLDT worker above. */
FNIEMOP_DEF_1(iemOp_Grp6_str, uint8_t, bRm)
{
    IEMOP_MNEMONIC("str Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Tr);
                IEM_MC_FETCH_TR_U16(u16Tr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Tr);
                IEM_MC_FETCH_TR_U32(u32Tr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tr);
                IEM_MC_FETCH_TR_U64(u64Tr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory destination: always a 16-bit store regardless of operand size. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Tr);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_TR_U16(u16Tr);
        IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Tr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
653
654
/** Opcode 0x0f 0x00 /2 — LLDT: load the LDTR from a 16-bit selector in a
 *  register or memory. 286+; not valid in real or V8086 mode. The heavy
 *  lifting (privilege/selector checks) is deferred to iemCImpl_lldt. */
FNIEMOP_DEF_1(iemOp_Grp6_lldt, uint8_t, bRm)
{
    IEMOP_MNEMONIC("lldt Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        /* CPL check precedes the memory fetch here. */
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
685
686
/** Opcode 0x0f 0x00 /3 — LTR: load the task register from a 16-bit selector
 *  in a register or memory. 286+; not valid in real or V8086 mode. Checks
 *  are deferred to iemCImpl_ltr. */
FNIEMOP_DEF_1(iemOp_Grp6_ltr, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ltr Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* CPL check precedes the memory fetch here, like in lldt. */
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
717
718
719/** Opcode 0x0f 0x00 /3. */
720FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
721{
722 IEMOP_HLP_MIN_286();
723 IEMOP_HLP_NO_REAL_OR_V86_MODE();
724
725 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
726 {
727 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
728 IEM_MC_BEGIN(2, 0);
729 IEM_MC_ARG(uint16_t, u16Sel, 0);
730 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
731 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
732 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
733 IEM_MC_END();
734 }
735 else
736 {
737 IEM_MC_BEGIN(2, 1);
738 IEM_MC_ARG(uint16_t, u16Sel, 0);
739 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
740 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
741 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
742 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
743 IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc);
744 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
745 IEM_MC_END();
746 }
747 return VINF_SUCCESS;
748}
749
750
/** Opcode 0x0f 0x00 /4 — VERR: verify a segment selector for reading.
 *  Thin wrapper around the common VerX worker with fWrite = false. */
FNIEMOP_DEF_1(iemOp_Grp6_verr, uint8_t, bRm)
{
    IEMOP_MNEMONIC("verr Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
}
758
759
760/** Opcode 0x0f 0x00 /5. */
761FNIEMOP_DEF_1(iemOp_Grp6_verw, uint8_t, bRm)
762{
763 IEMOP_MNEMONIC("verr Ew");
764 IEMOP_HLP_MIN_286();
765 return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
766}
767
768
769/** Opcode 0x0f 0x00. */
770FNIEMOP_DEF(iemOp_Grp6)
771{
772 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
773 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
774 {
775 case 0: return FNIEMOP_CALL_1(iemOp_Grp6_sldt, bRm);
776 case 1: return FNIEMOP_CALL_1(iemOp_Grp6_str, bRm);
777 case 2: return FNIEMOP_CALL_1(iemOp_Grp6_lldt, bRm);
778 case 3: return FNIEMOP_CALL_1(iemOp_Grp6_ltr, bRm);
779 case 4: return FNIEMOP_CALL_1(iemOp_Grp6_verr, bRm);
780 case 5: return FNIEMOP_CALL_1(iemOp_Grp6_verw, bRm);
781 case 6: return IEMOP_RAISE_INVALID_OPCODE();
782 case 7: return IEMOP_RAISE_INVALID_OPCODE();
783 IEM_NOT_REACHED_DEFAULT_CASE_RET();
784 }
785
786}
787
788
/** Opcode 0x0f 0x01 /0 — SGDT: store the GDTR to memory. 286+. The actual
 *  store is done by iemCImpl_sgdt with the effective segment, address and
 *  operand size. */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC("sgdt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE(); /* forces 64-bit operand size in long mode */
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t,         iEffSeg,                                    0);
    IEM_MC_ARG(RTGCPTR,         GCPtrEffSrc,                                1);
    IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSizeArg,/*=*/pIemCpu->enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}
806
807
/** Opcode 0x0f 0x01 /0 (mod=3 variant) — VMCALL. VT-x instruction, not yet
 *  implemented: logs a stub complaint and raises \#UD. */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0 (mod=3 variant) — VMLAUNCH. VT-x, unimplemented stub
 *  raising \#UD. */
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0 (mod=3 variant) — VMRESUME. VT-x, unimplemented stub
 *  raising \#UD. */
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0 (mod=3 variant) — VMXOFF. VT-x, unimplemented stub
 *  raising \#UD. */
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
838
839
/** Opcode 0x0f 0x01 /1 — SIDT: store the IDTR to memory. 286+. Mirrors the
 *  SGDT worker, deferring to iemCImpl_sidt. */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC("sidt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE(); /* forces 64-bit operand size in long mode */
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t,         iEffSeg,                                    0);
    IEM_MC_ARG(RTGCPTR,         GCPtrEffSrc,                                1);
    IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSizeArg,/*=*/pIemCpu->enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_sidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}
857
858
/** Opcode 0x0f 0x01 /1 (mod=3 variant) — MONITOR. Deferred entirely to
 *  iemCImpl_monitor, passing the effective segment for the address in rAX. */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    IEMOP_MNEMONIC("monitor");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pIemCpu->iEffSeg);
}
866
867
/** Opcode 0x0f 0x01 /1 (mod=3 variant) — MWAIT. Deferred entirely to
 *  iemCImpl_mwait. */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    IEMOP_MNEMONIC("mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
}
875
876
/** Opcode 0x0f 0x01 /2 — LGDT: load the GDTR from memory via iemCImpl_lgdt.
 *  NOTE(review): unlike sgdt/sidt this omits IEMOP_HLP_MIN_286() — confirm
 *  whether that is intentional. */
FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC("lgdt");
    IEMOP_HLP_64BIT_OP_SIZE(); /* forces 64-bit operand size in long mode */
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t,         iEffSeg,                                    0);
    IEM_MC_ARG(RTGCPTR,         GCPtrEffSrc,                                1);
    IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSizeArg,/*=*/pIemCpu->enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}
893
894
/** Opcode 0x0f 0x01 0xd0 — XGETBV. Only valid when the guest CPU profile
 *  advertises XSAVE/XRSTOR; otherwise \#UD. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    IEMOP_MNEMONIC("xgetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xgetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}
906
907
/** Opcode 0x0f 0x01 0xd1 — XSETBV. Only valid when the guest CPU profile
 *  advertises XSAVE/XRSTOR; otherwise \#UD. */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    IEMOP_MNEMONIC("xsetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xsetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}
919
920
/** Opcode 0x0f 0x01 /3 — LIDT: load the IDTR from memory via iemCImpl_lidt.
 *  In 64-bit mode the operand size is forced to 64-bit (computed inline here
 *  instead of via IEMOP_HLP_64BIT_OP_SIZE like the lgdt worker).
 *  NOTE(review): no IEMOP_MNEMONIC/MIN_286 here, unlike the siblings —
 *  confirm intentional. */
FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
{
    IEMMODE enmEffOpSize = pIemCpu->enmCpuMode == IEMMODE_64BIT
                         ? IEMMODE_64BIT
                         : pIemCpu->enmEffOpSize;
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t,         iEffSeg,                            0);
    IEM_MC_ARG(RTGCPTR,         GCPtrEffSrc,                        1);
    IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSizeArg,/*=*/enmEffOpSize,  2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}
938
939
/* AMD SVM instructions (0x0f 0x01 0xd8..0xdf): all currently stubbed to
   raise \#UD via FNIEMOP_UD_STUB. */

/** Opcode 0x0f 0x01 0xd8 — VMRUN (AMD-V), unimplemented. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);

/** Opcode 0x0f 0x01 0xd9 — VMMCALL (AMD-V), unimplemented. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmmcall);

/** Opcode 0x0f 0x01 0xda — VMLOAD (AMD-V), unimplemented. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);

/** Opcode 0x0f 0x01 0xdb — VMSAVE (AMD-V), unimplemented. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);

/** Opcode 0x0f 0x01 0xdc — STGI (AMD-V), unimplemented. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);

/** Opcode 0x0f 0x01 0xdd — CLGI (AMD-V), unimplemented. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);

/** Opcode 0x0f 0x01 0xde — SKINIT (AMD-V), unimplemented. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);

/** Opcode 0x0f 0x01 0xdf — INVLPGA (AMD-V), unimplemented. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
963
/** Opcode 0x0f 0x01 /4 — SMSW: store the machine status word (CR0) to a
 *  register (operand-size wide) or a 16-bit memory word. 286+. */
FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC("smsw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_LOCK_PREFIX();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_FETCH_CR0_U16(u16Tmp);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_FETCH_CR0_U32(u32Tmp);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_FETCH_CR0_U64(u64Tmp);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Ignore operand size here, memory refs are always 16-bit. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_CR0_U16(u16Tmp);
        IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Tmp);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }
}
1018
1019
/** Opcode 0x0f 0x01 /6 — LMSW: load the machine status word into CR0 via
 *  iemCImpl_lmsw. 286+. */
FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
{
    /* The operand size is effectively ignored, all is 16-bit and only the
       lower 3-bits are used. */
    IEMOP_MNEMONIC("lmsw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_LOCK_PREFIX();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1048
1049
/**
 * Opcode 0x0f 0x01 /7 (memory form) - INVLPG.
 *
 * Only the memory form reaches this worker; the dispatcher (iemOp_Grp7)
 * routes mod=3 encodings to swapgs/rdtscp instead.  The actual TLB
 * invalidation is deferred to iemCImpl_invlpg.
 */
FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
{
    IEMOP_MNEMONIC("invlpg");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(1, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}
1063
1064
/**
 * Opcode 0x0f 0x01 0xf8 - SWAPGS (64-bit mode only); defers to
 * iemCImpl_swapgs.
 */
FNIEMOP_DEF(iemOp_Grp7_swapgs)
{
    IEMOP_MNEMONIC("swapgs");
    IEMOP_HLP_ONLY_64BIT();
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
}
1073
1074
/** Opcode 0x0f 0x01 0xf9 - RDTSCP; not implemented yet, complains and
 *  returns VERR_IEM_INSTR_NOT_IMPLEMENTED. */
FNIEMOP_DEF(iemOp_Grp7_rdtscp)
{
    NOREF(pIemCpu);
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}
1082
1083
/**
 * Opcode 0x0f 0x01 - group 7 dispatcher.
 *
 * Memory forms (mod != 3) select sgdt/sidt/lgdt/lidt/smsw/lmsw/invlpg by the
 * reg field; register forms (mod == 3) re-use reg+rm to encode VMX, MONITOR,
 * XGETBV/XSETBV, AMD SVM, SWAPGS and RDTSCP encodings.
 */
FNIEMOP_DEF(iemOp_Grp7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_sgdt, bRm);
            /* VMX encodings 0x0f 0x01 0xc1..0xc4. */
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 1:
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_sidt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 2:
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_lgdt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 3:
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_lidt, bRm);
            /* AMD SVM encodings 0x0f 0x01 0xd8..0xdf - all rm values covered. */
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
                case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
                case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
                case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }

        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);

        case 5:
            return IEMOP_RAISE_INVALID_OPCODE();

        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);

        case 7:
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_invlpg, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
1160
/**
 * Common worker for LAR (0x0f 0x02) and LSL (0x0f 0x03).
 *
 * (The original comment labelled this "0x0f 0x00 /3", which is LTR - wrong.)
 *
 * @param   fIsLar  true for LAR, false for LSL; passed on as a constant
 *                  argument to the iemCImpl_LarLsl_u16/u64 workers.
 */
FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
{
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register source operand. */
        IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint16_t *,  pu16Dst,           0);
                IEM_MC_ARG(uint16_t,    u16Sel,            1);
                IEM_MC_ARG(uint32_t *,  pEFlags,           2);
                IEM_MC_ARG_CONST(bool,  fIsLarArg, fIsLar, 3);

                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_CIMPL_4(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, pEFlags, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            /* 32-bit and 64-bit operand sizes share the 64-bit worker. */
            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint64_t *,  pu64Dst,           0);
                IEM_MC_ARG(uint16_t,    u16Sel,            1);
                IEM_MC_ARG(uint32_t *,  pEFlags,           2);
                IEM_MC_ARG_CONST(bool,  fIsLarArg, fIsLar, 3);

                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_CIMPL_4(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, pEFlags, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory source operand: a 16-bit selector is fetched. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint16_t *,  pu16Dst,           0);
                IEM_MC_ARG(uint16_t,    u16Sel,            1);
                IEM_MC_ARG(uint32_t *,  pEFlags,           2);
                IEM_MC_ARG_CONST(bool,  fIsLarArg, fIsLar, 3);
                IEM_MC_LOCAL(RTGCPTR,   GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);

                IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_CIMPL_4(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, pEFlags, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint64_t *,  pu64Dst,           0);
                IEM_MC_ARG(uint16_t,    u16Sel,            1);
                IEM_MC_ARG(uint32_t *,  pEFlags,           2);
                IEM_MC_ARG_CONST(bool,  fIsLarArg, fIsLar, 3);
                IEM_MC_LOCAL(RTGCPTR,   GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
/** @todo testcase: make sure it's a 16-bit read. */

                IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_CIMPL_4(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, pEFlags, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
1262
1263
1264
/** Opcode 0x0f 0x02 - LAR Gv,Ew; thin wrapper over the common LAR/LSL worker. */
FNIEMOP_DEF(iemOp_lar_Gv_Ew)
{
    IEMOP_MNEMONIC("lar Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
}
1271
1272
/** Opcode 0x0f 0x03 - LSL Gv,Ew; thin wrapper over the common LAR/LSL worker. */
FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
{
    IEMOP_MNEMONIC("lsl Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
}
1279
1280
/** Opcode 0x0f 0x05 - SYSCALL; deferred to iemCImpl_syscall. */
FNIEMOP_DEF(iemOp_syscall)
{
    IEMOP_MNEMONIC("syscall"); /** @todo 286 LOADALL */
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
}
1288
1289
/** Opcode 0x0f 0x06 - CLTS (clear CR0.TS); deferred to iemCImpl_clts. */
FNIEMOP_DEF(iemOp_clts)
{
    IEMOP_MNEMONIC("clts");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
}
1297
1298
/** Opcode 0x0f 0x07 - SYSRET; deferred to iemCImpl_sysret. */
FNIEMOP_DEF(iemOp_sysret)
{
    IEMOP_MNEMONIC("sysret"); /** @todo 386 LOADALL */
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
}
1306
1307
/** Opcode 0x0f 0x08 - INVD; still a stub (the 486 minimum check is kept as a
 *  reminder for when it gets implemented). */
FNIEMOP_STUB(iemOp_invd);
// IEMOP_HLP_MIN_486();
1311
1312
/**
 * Opcode 0x0f 0x09 - WBINVD.  Only the CPL-0 check is performed; the cache
 * write-back/invalidate itself is treated as a no-op.
 */
FNIEMOP_DEF(iemOp_wbinvd)
{
    IEMOP_MNEMONIC("wbinvd");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS; /* ignore for now */
}
1325
1326
/** Opcode 0x0f 0x0b - UD2; still a stub. */
FNIEMOP_STUB(iemOp_ud2);
1329
/**
 * Opcode 0x0f 0x0d - AMD 3DNow! prefetch group (GrpP).
 *
 * Raises \#UD when the CPU doesn't report 3DNow! prefetch support or when a
 * register operand is encoded; otherwise the effective address is decoded
 * and the prefetch hint is treated as a NOP.
 */
FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
{
    /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
    if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->f3DNowPrefetch)
    {
        IEMOP_MNEMONIC("GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC("GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    IEMOP_HLP_NO_LOCK_PREFIX();
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 2: /* Aliased to /0 for the time being. */
        case 4: /* Aliased to /0 for the time being. */
        case 5: /* Aliased to /0 for the time being. */
        case 6: /* Aliased to /0 for the time being. */
        case 7: /* Aliased to /0 for the time being. */
        case 0: IEMOP_MNEMONIC("prefetch"); break;
        case 1: IEMOP_MNEMONIC("prefetchw"); break;
        case 3: IEMOP_MNEMONIC("prefetchw"); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    /* Currently a NOP. */
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
1369
1370
/** Opcode 0x0f 0x0e - FEMMS; still a stub. */
FNIEMOP_STUB(iemOp_femms);


/* AMD 3DNow! instructions (0x0f 0x0f <modrm> <imm8 sub-opcode>), dispatched
   by iemOp_3Dnow below.  All are currently unimplemented stubs. */

/** Opcode 0x0f 0x0f 0x0c. */
FNIEMOP_STUB(iemOp_3Dnow_pi2fw_Pq_Qq);

/** Opcode 0x0f 0x0f 0x0d. */
FNIEMOP_STUB(iemOp_3Dnow_pi2fd_Pq_Qq);

/** Opcode 0x0f 0x0f 0x1c. */
FNIEMOP_STUB(iemOp_3Dnow_pf2fw_Pq_Qq);

/** Opcode 0x0f 0x0f 0x1d. */
FNIEMOP_STUB(iemOp_3Dnow_pf2fd_Pq_Qq);

/** Opcode 0x0f 0x0f 0x8a. */
FNIEMOP_STUB(iemOp_3Dnow_pfnacc_Pq_Qq);

/** Opcode 0x0f 0x0f 0x8e. */
FNIEMOP_STUB(iemOp_3Dnow_pfpnacc_Pq_Qq);

/** Opcode 0x0f 0x0f 0x90. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpge_Pq_Qq);

/** Opcode 0x0f 0x0f 0x94. */
FNIEMOP_STUB(iemOp_3Dnow_pfmin_Pq_Qq);

/** Opcode 0x0f 0x0f 0x96. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcp_Pq_Qq);

/** Opcode 0x0f 0x0f 0x97. */
FNIEMOP_STUB(iemOp_3Dnow_pfrsqrt_Pq_Qq);

/** Opcode 0x0f 0x0f 0x9a. */
FNIEMOP_STUB(iemOp_3Dnow_pfsub_Pq_Qq);

/** Opcode 0x0f 0x0f 0x9e. */
FNIEMOP_STUB(iemOp_3Dnow_pfadd_PQ_Qq);

/** Opcode 0x0f 0x0f 0xa0. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpgt_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa4. */
FNIEMOP_STUB(iemOp_3Dnow_pfmax_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa6. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcpit1_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa7. */
FNIEMOP_STUB(iemOp_3Dnow_pfrsqit1_Pq_Qq);

/** Opcode 0x0f 0x0f 0xaa. */
FNIEMOP_STUB(iemOp_3Dnow_pfsubr_Pq_Qq);

/** Opcode 0x0f 0x0f 0xae. */
FNIEMOP_STUB(iemOp_3Dnow_pfacc_PQ_Qq);

/** Opcode 0x0f 0x0f 0xb0. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpeq_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb4. */
FNIEMOP_STUB(iemOp_3Dnow_pfmul_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb6. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcpit2_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb7. */
FNIEMOP_STUB(iemOp_3Dnow_pmulhrw_Pq_Qq);

/** Opcode 0x0f 0x0f 0xbb. */
FNIEMOP_STUB(iemOp_3Dnow_pswapd_Pq_Qq);

/** Opcode 0x0f 0x0f 0xbf. */
FNIEMOP_STUB(iemOp_3Dnow_pavgusb_PQ_Qq);
1446
1447
/**
 * Opcode 0x0f 0x0f - 3DNow! dispatcher.
 *
 * Raises \#UD when 3DNow! isn't reported by the guest CPU; otherwise reads
 * the trailing sub-opcode byte and dispatches to the per-instruction stub.
 */
FNIEMOP_DEF(iemOp_3Dnow)
{
    if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->f3DNow)
    {
        IEMOP_MNEMONIC("3Dnow");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    /* This is pretty sparse, use switch instead of table. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    switch (b)
    {
        case 0x0c: return FNIEMOP_CALL(iemOp_3Dnow_pi2fw_Pq_Qq);
        case 0x0d: return FNIEMOP_CALL(iemOp_3Dnow_pi2fd_Pq_Qq);
        case 0x1c: return FNIEMOP_CALL(iemOp_3Dnow_pf2fw_Pq_Qq);
        case 0x1d: return FNIEMOP_CALL(iemOp_3Dnow_pf2fd_Pq_Qq);
        case 0x8a: return FNIEMOP_CALL(iemOp_3Dnow_pfnacc_Pq_Qq);
        case 0x8e: return FNIEMOP_CALL(iemOp_3Dnow_pfpnacc_Pq_Qq);
        case 0x90: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpge_Pq_Qq);
        case 0x94: return FNIEMOP_CALL(iemOp_3Dnow_pfmin_Pq_Qq);
        case 0x96: return FNIEMOP_CALL(iemOp_3Dnow_pfrcp_Pq_Qq);
        case 0x97: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqrt_Pq_Qq);
        case 0x9a: return FNIEMOP_CALL(iemOp_3Dnow_pfsub_Pq_Qq);
        case 0x9e: return FNIEMOP_CALL(iemOp_3Dnow_pfadd_PQ_Qq);
        case 0xa0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpgt_Pq_Qq);
        case 0xa4: return FNIEMOP_CALL(iemOp_3Dnow_pfmax_Pq_Qq);
        case 0xa6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit1_Pq_Qq);
        case 0xa7: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqit1_Pq_Qq);
        case 0xaa: return FNIEMOP_CALL(iemOp_3Dnow_pfsubr_Pq_Qq);
        case 0xae: return FNIEMOP_CALL(iemOp_3Dnow_pfacc_PQ_Qq);
        case 0xb0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpeq_Pq_Qq);
        case 0xb4: return FNIEMOP_CALL(iemOp_3Dnow_pfmul_Pq_Qq);
        case 0xb6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit2_Pq_Qq);
        case 0xb7: return FNIEMOP_CALL(iemOp_3Dnow_pmulhrw_Pq_Qq);
        case 0xbb: return FNIEMOP_CALL(iemOp_3Dnow_pswapd_Pq_Qq);
        case 0xbf: return FNIEMOP_CALL(iemOp_3Dnow_pavgusb_PQ_Qq);
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
1489
1490
/* SSE/SSE2 move instructions 0x0f 0x10..0x17 - all unimplemented stubs.
   (//NEXT marks candidates for upcoming implementation.) */

/** Opcode 0x0f 0x10. */
FNIEMOP_STUB(iemOp_movups_Vps_Wps__movupd_Vpd_Wpd__movss_Vss_Wss__movsd_Vsd_Wsd);
/** Opcode 0x0f 0x11. */
FNIEMOP_STUB(iemOp_movups_Wps_Vps__movupd_Wpd_Vpd__movss_Wss_Vss__movsd_Vsd_Wsd);
/** Opcode 0x0f 0x12. */
FNIEMOP_STUB(iemOp_movlps_Vq_Mq__movhlps_Vq_Uq__movlpd_Vq_Mq__movsldup_Vq_Wq__movddup_Vq_Wq); //NEXT
/** Opcode 0x0f 0x13. */
FNIEMOP_STUB(iemOp_movlps_Mq_Vq__movlpd_Mq_Vq); //NEXT
/** Opcode 0x0f 0x14. */
FNIEMOP_STUB(iemOp_unpckhlps_Vps_Wq__unpcklpd_Vpd_Wq);
/** Opcode 0x0f 0x15. */
FNIEMOP_STUB(iemOp_unpckhps_Vps_Wq__unpckhpd_Vpd_Wq);
/** Opcode 0x0f 0x16. */
FNIEMOP_STUB(iemOp_movhps_Vq_Mq__movlhps_Vq_Uq__movhpd_Vq_Mq__movshdup_Vq_Wq); //NEXT
/** Opcode 0x0f 0x17. */
FNIEMOP_STUB(iemOp_movhps_Mq_Vq__movhpd_Mq_Vq); //NEXT
1507
1508
1509/** Opcode 0x0f 0x18. */
1510FNIEMOP_DEF(iemOp_prefetch_Grp16)
1511{
1512 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1513 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1514 {
1515 IEMOP_HLP_NO_LOCK_PREFIX();
1516 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
1517 {
1518 case 4: /* Aliased to /0 for the time being according to AMD. */
1519 case 5: /* Aliased to /0 for the time being according to AMD. */
1520 case 6: /* Aliased to /0 for the time being according to AMD. */
1521 case 7: /* Aliased to /0 for the time being according to AMD. */
1522 case 0: IEMOP_MNEMONIC("prefetchNTA m8"); break;
1523 case 1: IEMOP_MNEMONIC("prefetchT0 m8"); break;
1524 case 2: IEMOP_MNEMONIC("prefetchT1 m8"); break;
1525 case 3: IEMOP_MNEMONIC("prefetchT2 m8"); break;
1526 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1527 }
1528
1529 IEM_MC_BEGIN(0, 1);
1530 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1531 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1532 /* Currently a NOP. */
1533 IEM_MC_ADVANCE_RIP();
1534 IEM_MC_END();
1535 return VINF_SUCCESS;
1536 }
1537
1538 return IEMOP_RAISE_INVALID_OPCODE();
1539}
1540
1541
1542/** Opcode 0x0f 0x19..0x1f. */
1543FNIEMOP_DEF(iemOp_nop_Ev)
1544{
1545 IEMOP_HLP_NO_LOCK_PREFIX();
1546 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1547 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1548 {
1549 IEM_MC_BEGIN(0, 0);
1550 IEM_MC_ADVANCE_RIP();
1551 IEM_MC_END();
1552 }
1553 else
1554 {
1555 IEM_MC_BEGIN(0, 1);
1556 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1557 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1558 /* Currently a NOP. */
1559 IEM_MC_ADVANCE_RIP();
1560 IEM_MC_END();
1561 }
1562 return VINF_SUCCESS;
1563}
1564
1565
/**
 * Opcode 0x0f 0x20 - MOV Rd,Cd (read control register).
 *
 * Only CR0/2/3/4/8 are valid; anything else raises \#UD.  A LOCK prefix is
 * used by some CPUs to encode CR8 access outside 64-bit mode (checked via
 * the fMovCr8In32Bit feature flag).  Deferred to iemCImpl_mov_Rd_Cd.
 */
FNIEMOP_DEF(iemOp_mov_Rd_Cd)
{
    /* mod is ignored, as is operand size overrides. */
    IEMOP_MNEMONIC("mov Rd,Cd");
    IEMOP_HLP_MIN_386();
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_64BIT;
    else
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg;
    if (pIemCpu->fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fMovCr8In32Bit)
            return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_HLP_DONE_DECODING();

    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, (X86_MODRM_RM_MASK & bRm) | pIemCpu->uRexB, iCrReg);
}
1597
1598
/**
 * Opcode 0x0f 0x21 - MOV Rd,Dd (read debug register).
 *
 * REX.R raises \#UD.  Deferred to iemCImpl_mov_Rd_Dd.
 */
FNIEMOP_DEF(iemOp_mov_Rd_Dd)
{
    IEMOP_MNEMONIC("mov Rd,Dd");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* NOTE(review): the 0x0f 0x23 counterpart uses
       IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX here - confirm whether this one
       should also mark decoding as done. */
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REX_R)
        return IEMOP_RAISE_INVALID_OPCODE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
                                   (X86_MODRM_RM_MASK & bRm) | pIemCpu->uRexB,
                                   ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK));
}
1612
1613
/**
 * Opcode 0x0f 0x22 - MOV Cd,Rd (write control register).
 *
 * Mirror image of iemOp_mov_Rd_Cd: only CR0/2/3/4/8 are valid and a LOCK
 * prefix may encode CR8 when the CPU supports it.  Deferred to
 * iemCImpl_mov_Cd_Rd.
 */
FNIEMOP_DEF(iemOp_mov_Cd_Rd)
{
    /* mod is ignored, as is operand size overrides. */
    IEMOP_MNEMONIC("mov Cd,Rd");
    IEMOP_HLP_MIN_386();
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_64BIT;
    else
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg;
    if (pIemCpu->fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fMovCr8In32Bit)
            return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_HLP_DONE_DECODING();

    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, (X86_MODRM_RM_MASK & bRm) | pIemCpu->uRexB);
}
1645
1646
/**
 * Opcode 0x0f 0x23 - MOV Dd,Rd (write debug register).
 *
 * REX.R raises \#UD.  Deferred to iemCImpl_mov_Dd_Rd.
 */
FNIEMOP_DEF(iemOp_mov_Dd_Rd)
{
    IEMOP_MNEMONIC("mov Dd,Rd");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REX_R)
        return IEMOP_RAISE_INVALID_OPCODE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
                                   ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK),
                                   (X86_MODRM_RM_MASK & bRm) | pIemCpu->uRexB);
}
1660
1661
/** Opcode 0x0f 0x24 - MOV Rd,Td (test registers); decoded as \#UD on CPUs
 *  that lack test registers. */
FNIEMOP_DEF(iemOp_mov_Rd_Td)
{
    IEMOP_MNEMONIC("mov Rd,Td");
    /** @todo works on 386 and 486. */
    /* The RM byte is not considered, see testcase. */
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x26 - MOV Td,Rd; same \#UD treatment as 0x0f 0x24. */
FNIEMOP_DEF(iemOp_mov_Td_Rd)
{
    IEMOP_MNEMONIC("mov Td,Rd");
    /** @todo works on 386 and 486. */
    /* The RM byte is not considered, see testcase. */
    return IEMOP_RAISE_INVALID_OPCODE();
}
1680
1681
/* SSE/SSE2 instructions 0x0f 0x28..0x2f - all unimplemented stubs. */

/** Opcode 0x0f 0x28. */
FNIEMOP_STUB(iemOp_movaps_Vps_Wps__movapd_Vpd_Wpd);
/** Opcode 0x0f 0x29. */
FNIEMOP_STUB(iemOp_movaps_Wps_Vps__movapd_Wpd_Vpd);
/** Opcode 0x0f 0x2a. */
FNIEMOP_STUB(iemOp_cvtpi2ps_Vps_Qpi__cvtpi2pd_Vpd_Qpi__cvtsi2ss_Vss_Ey__cvtsi2sd_Vsd_Ey); //NEXT
/** Opcode 0x0f 0x2b. */
FNIEMOP_STUB(iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd); //NEXT:XP
/** Opcode 0x0f 0x2c. */
FNIEMOP_STUB(iemOp_cvttps2pi_Ppi_Wps__cvttpd2pi_Ppi_Wpd__cvttss2si_Gy_Wss__cvttsd2si_Yu_Wsd); //NEXT
/** Opcode 0x0f 0x2d. */
FNIEMOP_STUB(iemOp_cvtps2pi_Ppi_Wps__cvtpd2pi_QpiWpd__cvtss2si_Gy_Wss__cvtsd2si_Gy_Wsd);
/** Opcode 0x0f 0x2e. */
FNIEMOP_STUB(iemOp_ucomiss_Vss_Wss__ucomisd_Vsd_Wsd); //NEXT
/** Opcode 0x0f 0x2f. */
FNIEMOP_STUB(iemOp_comiss_Vss_Wss__comisd_Vsd_Wsd);
1698
1699
/** Opcode 0x0f 0x30 - WRMSR; deferred to iemCImpl_wrmsr. */
FNIEMOP_DEF(iemOp_wrmsr)
{
    IEMOP_MNEMONIC("wrmsr");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
}
1707
1708
/** Opcode 0x0f 0x31 - RDTSC; deferred to iemCImpl_rdtsc. */
FNIEMOP_DEF(iemOp_rdtsc)
{
    IEMOP_MNEMONIC("rdtsc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
}
1716
1717
/** Opcode 0x0f 0x32 - RDMSR; deferred to iemCImpl_rdmsr.
 *  (The comment previously said 0x0f 0x33, which is RDPMC.) */
FNIEMOP_DEF(iemOp_rdmsr)
{
    IEMOP_MNEMONIC("rdmsr");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
}
1725
1726
/** Opcode 0x0f 0x33 - RDPMC; still a stub.
 *  (The comment previously said 0x0f 0x34, which is SYSENTER.) */
FNIEMOP_STUB(iemOp_rdpmc);
/** Opcode 0x0f 0x34. */
FNIEMOP_STUB(iemOp_sysenter);
/** Opcode 0x0f 0x35. */
FNIEMOP_STUB(iemOp_sysexit);
/** Opcode 0x0f 0x37. */
FNIEMOP_STUB(iemOp_getsec);
/** Opcode 0x0f 0x38. */
FNIEMOP_UD_STUB(iemOp_3byte_Esc_A4); /* Here there be dragons... */
/** Opcode 0x0f 0x3a. */
FNIEMOP_UD_STUB(iemOp_3byte_Esc_A5); /* Here there be dragons... */
/** Opcode 0x0f 0x3c (?). */
FNIEMOP_STUB(iemOp_movnti_Gv_Ev);
1741
/**
 * Implements a conditional move (CMOVcc Gv,Ev).
 *
 * Expands the full register/memory and 16/32/64-bit operand-size decoding for
 * one CMOVcc instruction.  Note that for 32-bit operand size the high half of
 * the 64-bit destination register is cleared (IEM_MC_CLEAR_HIGH_GREG_U64 in
 * the ELSE arm) even when the condition is false, whereas 16-bit and 64-bit
 * operand sizes leave the destination untouched when the condition is false.
 *
 * Wish there was an obvious way to do this where we could share and reduce
 * code bloat.
 *
 * @param a_Cnd The conditional "microcode" operation.
 */
#define CMOV_X(a_Cnd) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) \
    { \
        switch (pIemCpu->enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(0, 1); \
                IEM_MC_LOCAL(uint16_t, u16Tmp); \
                a_Cnd { \
                    IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB); \
                    IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(0, 1); \
                IEM_MC_LOCAL(uint32_t, u32Tmp); \
                a_Cnd { \
                    IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB); \
                    IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp); \
                } IEM_MC_ELSE() { \
                    IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(0, 1); \
                IEM_MC_LOCAL(uint64_t, u64Tmp); \
                a_Cnd { \
                    IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB); \
                    IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        switch (pIemCpu->enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(0, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
                IEM_MC_LOCAL(uint16_t, u16Tmp); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
                IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrEffSrc); \
                a_Cnd { \
                    IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(0, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
                IEM_MC_LOCAL(uint32_t, u32Tmp); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
                IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffSrc); \
                a_Cnd { \
                    IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp); \
                } IEM_MC_ELSE() { \
                    IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(0, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
                IEM_MC_LOCAL(uint64_t, u64Tmp); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
                IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffSrc); \
                a_Cnd { \
                    IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } do {} while (0)
1842
1843
1844
/** Opcode 0x0f 0x40 - CMOVO: move if overflow (OF=1). */
FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovo Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
}


/** Opcode 0x0f 0x41 - CMOVNO: move if not overflow (OF=0). */
FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovno Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
}


/** Opcode 0x0f 0x42 - CMOVC/CMOVB: move if carry (CF=1). */
FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovc Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
}


/** Opcode 0x0f 0x43 - CMOVNC/CMOVAE: move if not carry (CF=0). */
FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovnc Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
}


/** Opcode 0x0f 0x44 - CMOVE/CMOVZ: move if equal (ZF=1). */
FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
{
    IEMOP_MNEMONIC("cmove Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
}


/** Opcode 0x0f 0x45 - CMOVNE/CMOVNZ: move if not equal (ZF=0). */
FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovne Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
}


/** Opcode 0x0f 0x46 - CMOVBE: move if below or equal (CF=1 or ZF=1). */
FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovbe Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
}


/** Opcode 0x0f 0x47 - CMOVNBE/CMOVA: move if above (CF=0 and ZF=0). */
FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovnbe Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
}
1907
1908
/** Opcode 0x0f 0x48 - CMOVS: move if sign (SF=1). */
FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovs Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
}


/** Opcode 0x0f 0x49 - CMOVNS: move if not sign (SF=0). */
FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovns Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
}


/** Opcode 0x0f 0x4a - CMOVP: move if parity (PF=1). */
FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovp Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
}


/** Opcode 0x0f 0x4b - CMOVNP: move if not parity (PF=0). */
FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovnp Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
}


/** Opcode 0x0f 0x4c - CMOVL: move if less (SF != OF). */
FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovl Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
}


/** Opcode 0x0f 0x4d - CMOVNL/CMOVGE: move if not less (SF == OF). */
FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovnl Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
}


/** Opcode 0x0f 0x4e - CMOVLE: move if less or equal (ZF=1 or SF != OF). */
FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovle Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
}


/** Opcode 0x0f 0x4f - CMOVNLE/CMOVG: move if greater (ZF=0 and SF == OF). */
FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovnle Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
}

#undef CMOV_X
1973
/* SSE/SSE2 arithmetic and conversion instructions 0x0f 0x50..0x5f - all
   unimplemented stubs. */

/** Opcode 0x0f 0x50. */
FNIEMOP_STUB(iemOp_movmskps_Gy_Ups__movmskpd_Gy_Upd);
/** Opcode 0x0f 0x51. */
FNIEMOP_STUB(iemOp_sqrtps_Wps_Vps__sqrtpd_Wpd_Vpd__sqrtss_Vss_Wss__sqrtsd_Vsd_Wsd);
/** Opcode 0x0f 0x52. */
FNIEMOP_STUB(iemOp_rsqrtps_Wps_Vps__rsqrtss_Vss_Wss);
/** Opcode 0x0f 0x53. */
FNIEMOP_STUB(iemOp_rcpps_Wps_Vps__rcpss_Vs_Wss);
/** Opcode 0x0f 0x54. */
FNIEMOP_STUB(iemOp_andps_Vps_Wps__andpd_Wpd_Vpd);
/** Opcode 0x0f 0x55. */
FNIEMOP_STUB(iemOp_andnps_Vps_Wps__andnpd_Wpd_Vpd);
/** Opcode 0x0f 0x56. */
FNIEMOP_STUB(iemOp_orps_Wpd_Vpd__orpd_Wpd_Vpd);
/** Opcode 0x0f 0x57. */
FNIEMOP_STUB(iemOp_xorps_Vps_Wps__xorpd_Wpd_Vpd);
/** Opcode 0x0f 0x58. */
FNIEMOP_STUB(iemOp_addps_Vps_Wps__addpd_Vpd_Wpd__addss_Vss_Wss__addsd_Vsd_Wsd); //NEXT
/** Opcode 0x0f 0x59. */
FNIEMOP_STUB(iemOp_mulps_Vps_Wps__mulpd_Vpd_Wpd__mulss_Vss__Wss__mulsd_Vsd_Wsd);//NEXT
/** Opcode 0x0f 0x5a. */
FNIEMOP_STUB(iemOp_cvtps2pd_Vpd_Wps__cvtpd2ps_Vps_Wpd__cvtss2sd_Vsd_Wss__cvtsd2ss_Vss_Wsd);
/** Opcode 0x0f 0x5b. */
FNIEMOP_STUB(iemOp_cvtdq2ps_Vps_Wdq__cvtps2dq_Vdq_Wps__cvtps2dq_Vdq_Wps);
/** Opcode 0x0f 0x5c. */
FNIEMOP_STUB(iemOp_subps_Vps_Wps__subpd_Vps_Wdp__subss_Vss_Wss__subsd_Vsd_Wsd);
/** Opcode 0x0f 0x5d. */
FNIEMOP_STUB(iemOp_minps_Vps_Wps__minpd_Vpd_Wpd__minss_Vss_Wss__minsd_Vsd_Wsd);
/** Opcode 0x0f 0x5e. */
FNIEMOP_STUB(iemOp_divps_Vps_Wps__divpd_Vpd_Wpd__divss_Vss_Wss__divsd_Vsd_Wsd);
/** Opcode 0x0f 0x5f. */
FNIEMOP_STUB(iemOp_maxps_Vps_Wps__maxpd_Vpd_Wpd__maxss_Vss_Wss__maxsd_Vsd_Wsd);
2006
2007
/**
 * Common worker for SSE2 and MMX instructions on the forms:
 *      pxxxx xmm1, xmm2/mem128
 *      pxxxx mm1, mm2/mem32
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 32-bit memory access for MMX and a 128-bit aligned 64-bit memory
 * access for SSE (only the low qword of the /mem128 operand is read).
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonMmxSse_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* The mandatory prefix (none = MMX, 0x66 = SSE) selects the implementation. */
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE */
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(2, 0);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_ARG(uint64_t const *, pSrc, 1); /* only the low qword of the source is used */
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_XREG_U64_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_LOCAL(uint64_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                /* 64-bit load, but with the 128-bit alignment check of the memory form. */
                IEM_MC_FETCH_MEM_U64_ALIGN_U128(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);

                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            if (!pImpl->pfnU64) /* instruction has no MMX form (SSE2 only). */
                return IEMOP_RAISE_INVALID_OPCODE();
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
                /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(2, 0);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_ARG(uint32_t const *, pSrc, 1); /* only the low dword of the source is used */
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_REF_MREG_U32_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
                IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_LOCAL(uint32_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint32_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_FETCH_MEM_U32(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);

                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default: /* 0xf2 / 0xf3 prefixes - no such encodings. */
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
2114
2115
/** Opcode 0x0f 0x60. */
FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd__punpcklbw_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpcklbw");
    /* Low-half byte interleave; common worker handles both MMX and SSE forms. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_LowLow_To_Full, &g_iemAImpl_punpcklbw);
}
2122
2123
/** Opcode 0x0f 0x61. */
FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd__punpcklwd_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpcklwd"); /** @todo AMD mark the MMX version as 3DNow!. Intel says MMX CPUID req. */
    /* Low-half word interleave; common worker handles both MMX and SSE forms. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_LowLow_To_Full, &g_iemAImpl_punpcklwd);
}
2130
2131
/** Opcode 0x0f 0x62. */
FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd__punpckldq_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpckldq");
    /* Low-half dword interleave; common worker handles both MMX and SSE forms. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_LowLow_To_Full, &g_iemAImpl_punpckldq);
}
2138
2139
/* Stubs for the MMX/SSE2 pack and compare-greater opcodes 0x0f 0x63..0x67. */
/** Opcode 0x0f 0x63. */
FNIEMOP_STUB(iemOp_packsswb_Pq_Qq__packsswb_Vdq_Wdq);
/** Opcode 0x0f 0x64. */
FNIEMOP_STUB(iemOp_pcmpgtb_Pq_Qq__pcmpgtb_Vdq_Wdq);
/** Opcode 0x0f 0x65. */
FNIEMOP_STUB(iemOp_pcmpgtw_Pq_Qq__pcmpgtw_Vdq_Wdq);
/** Opcode 0x0f 0x66. */
FNIEMOP_STUB(iemOp_pcmpgtd_Pq_Qq__pcmpgtd_Vdq_Wdq);
/** Opcode 0x0f 0x67. */
FNIEMOP_STUB(iemOp_packuswb_Pq_Qq__packuswb_Vdq_Wdq);
2150
2151
/**
 * Common worker for SSE2 and MMX instructions on the forms:
 *      pxxxx xmm1, xmm2/mem128
 *      pxxxx mm1, mm2/mem64
 *
 * The 2nd operand is the second half of a register, which in the memory case
 * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
 * where it may read the full 128 bits or only the upper 64 bits.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonMmxSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* The mandatory prefix (none = MMX, 0x66 = SSE) selects the implementation. */
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE */
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(2, 0);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_ARG(uint128_t const *, pSrc, 1);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_LOCAL(uint128_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc); /* Most CPUs probably only read the high qword. */

                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            if (!pImpl->pfnU64) /* instruction has no MMX form (SSE2 only). */
                return IEMOP_RAISE_INVALID_OPCODE();
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
                /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(2, 0);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_ARG(uint64_t const *, pSrc, 1);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
                IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_LOCAL(uint64_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_FETCH_MEM_U64(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);

                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default: /* 0xf2 / 0xf3 prefixes - no such encodings. */
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
2258
2259
/** Opcode 0x0f 0x68. */
FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qq__punpckhbw_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpckhbw");
    /* High-half byte interleave; common worker handles both MMX and SSE forms. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
}
2266
2267
/** Opcode 0x0f 0x69. */
FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qd__punpckhwd_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpckhwd");
    /* High-half word interleave; common worker handles both MMX and SSE forms. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
}
2274
2275
/** Opcode 0x0f 0x6a. */
FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qd__punpckhdq_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpckhdq");
    /* High-half dword interleave; common worker handles both MMX and SSE forms. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
}

/** Opcode 0x0f 0x6b. */
FNIEMOP_STUB(iemOp_packssdw_Pq_Qd__packssdq_Vdq_Wdq);
2285
2286
/** Opcode 0x0f 0x6c. */
FNIEMOP_DEF(iemOp_punpcklqdq_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpcklqdq");
    /* SSE2 only; the common worker rejects the no-prefix (MMX) encoding,
       presumably because g_iemAImpl_punpcklqdq has a NULL pfnU64 - declared elsewhere, verify. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_LowLow_To_Full, &g_iemAImpl_punpcklqdq);
}
2293
2294
/** Opcode 0x0f 0x6d. */
FNIEMOP_DEF(iemOp_punpckhqdq_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpckhqdq");
    /* SSE2 only; the common worker rejects the no-prefix (MMX) encoding,
       presumably because g_iemAImpl_punpckhqdq has a NULL pfnU64 - declared elsewhere, verify. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhqdq);
}
2301
2302
/** Opcode 0x0f 0x6e.
 * MOVD/MOVQ from a general register or memory into an MMX or XMM register.
 * REX.W selects the 64-bit (movq) form; the 0x66 prefix selects XMM. */
FNIEMOP_DEF(iemOp_movd_q_Pd_Ey__movd_q_Vy_Ey)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE */
            IEMOP_MNEMONIC("movd/q Wd/q,Ed/q");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* XMM, greg*/
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    /* movq: full 64-bit GPR, zero-extended into the XMM register. */
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                    IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);
                }
                else
                {
                    /* movd: 32-bit GPR, zero-extended into the XMM register. */
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                    IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* XMM, [mem] */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                    IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                    IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            IEMOP_MNEMONIC("movq/d Pd/q,Ed/q");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* MMX, greg */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                    IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                else
                    IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* MMX, [mem] */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                    IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                    IEM_MC_STORE_MREG_U32_ZX_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default: /* 0xf2 / 0xf3 prefixes - no such encodings. */
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
2403
2404
/** Opcode 0x0f 0x6f.
 * MOVQ (MMX), MOVDQA (0x66 prefix) and MOVDQU (0xf3 prefix) loads. */
FNIEMOP_DEF(iemOp_movq_Pq_Qq__movdqa_Vdq_Wdq__movdqu_Vdq_Wdq)
{
    bool fAligned = false;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE aligned */
            fAligned = true;
            /* fall thru - the two SSE forms differ only in the alignment check. */
        case IEM_OP_PRF_REPZ: /* SSE unaligned */
            if (fAligned)
                IEMOP_MNEMONIC("movdqa Vdq,Wdq");
            else
                IEMOP_MNEMONIC("movdqu Vdq,Wdq");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint128_t, u128Tmp);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_FETCH_XREG_U128(u128Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u128Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint128_t, u128Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                if (fAligned)
                    IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                else
                    IEM_MC_FETCH_MEM_U128(u128Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u128Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            IEMOP_MNEMONIC("movq Pq,Qq");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
                /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_FETCH_MREG_U64(u64Tmp, bRm & X86_MODRM_RM_MASK);
                IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default: /* 0xf2 prefix - no such encoding. */
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
2498
2499
/** Opcode 0x0f 0x70. The immediate here is evil!
 * PSHUFW (MMX ext), PSHUFD (0x66), PSHUFLW (0xf2) and PSHUFHW (0xf3).
 * The shuffle-control immediate follows the ModR/M bytes, which is why it
 * must be fetched only after IEM_MC_CALC_RM_EFF_ADDR in the memory forms. */
FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib__pshufd_Vdq_Wdq_Ib__pshufhw_Vdq_Wdq_Ib__pshuflq_Vdq_Wdq_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE */
        case IEM_OP_PRF_REPNZ: /* SSE */
        case IEM_OP_PRF_REPZ: /* SSE */
        {
            /* The three SSE forms share the decode structure; only the
               assembly worker differs. */
            PFNIEMAIMPLMEDIAPSHUF pfnAImpl;
            switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
            {
                case IEM_OP_PRF_SIZE_OP:
                    IEMOP_MNEMONIC("pshufd Vdq,Wdq,Ib");
                    pfnAImpl = iemAImpl_pshufd;
                    break;
                case IEM_OP_PRF_REPNZ:
                    IEMOP_MNEMONIC("pshuflw Vdq,Wdq,Ib");
                    pfnAImpl = iemAImpl_pshuflw;
                    break;
                case IEM_OP_PRF_REPZ:
                    IEMOP_MNEMONIC("pshufhw Vdq,Wdq,Ib");
                    pfnAImpl = iemAImpl_pshufhw;
                    break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_ARG(uint128_t const *, pSrc, 1);
                IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_CALL_SSE_AIMPL_3(pfnAImpl, pDst, pSrc, bEvilArg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_LOCAL(uint128_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                /* Immediate comes after the displacement. */
                uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
                IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();

                IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_SSE_AIMPL_3(pfnAImpl, pDst, pSrc, bEvilArg);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }

        case 0: /* MMX Extension */
            IEMOP_MNEMONIC("pshufw Pq,Qq,Ib");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_ARG(uint64_t const *, pSrc, 1);
                IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
                IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_LOCAL(uint64_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                /* Immediate comes after the displacement. */
                uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
                IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();

                IEM_MC_FETCH_MEM_U64(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
2624
2625
/* Stubs for the group-12 immediate word-shift register forms (psrlw/psraw/psllw). */
/** Opcode 0x0f 0x71 11/2. */
FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x71 11/2. */
FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Udq_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x71 11/4. */
FNIEMOP_STUB_1(iemOp_Grp12_psraw_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x71 11/4. */
FNIEMOP_STUB_1(iemOp_Grp12_psraw_Udq_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x71 11/6. */
FNIEMOP_STUB_1(iemOp_Grp12_psllw_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x71 11/6. */
FNIEMOP_STUB_1(iemOp_Grp12_psllw_Udq_Ib, uint8_t, bRm);
2643
2644
2645/** Opcode 0x0f 0x71. */
2646FNIEMOP_DEF(iemOp_Grp12)
2647{
2648 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2649 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2650 return IEMOP_RAISE_INVALID_OPCODE();
2651 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
2652 {
2653 case 0: case 1: case 3: case 5: case 7:
2654 return IEMOP_RAISE_INVALID_OPCODE();
2655 case 2:
2656 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2657 {
2658 case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psrlw_Nq_Ib, bRm);
2659 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psrlw_Udq_Ib, bRm);
2660 default: return IEMOP_RAISE_INVALID_OPCODE();
2661 }
2662 case 4:
2663 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2664 {
2665 case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psraw_Nq_Ib, bRm);
2666 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psraw_Udq_Ib, bRm);
2667 default: return IEMOP_RAISE_INVALID_OPCODE();
2668 }
2669 case 6:
2670 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2671 {
2672 case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psllw_Nq_Ib, bRm);
2673 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psllw_Udq_Ib, bRm);
2674 default: return IEMOP_RAISE_INVALID_OPCODE();
2675 }
2676 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2677 }
2678}
2679
2680
/* Stubs for the group-13 immediate dword-shift register forms (psrld/psrad/pslld). */
/** Opcode 0x0f 0x72 11/2. */
FNIEMOP_STUB_1(iemOp_Grp13_psrld_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x72 11/2. */
FNIEMOP_STUB_1(iemOp_Grp13_psrld_Udq_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x72 11/4. */
FNIEMOP_STUB_1(iemOp_Grp13_psrad_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x72 11/4. */
FNIEMOP_STUB_1(iemOp_Grp13_psrad_Udq_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x72 11/6. */
FNIEMOP_STUB_1(iemOp_Grp13_pslld_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x72 11/6. */
FNIEMOP_STUB_1(iemOp_Grp13_pslld_Udq_Ib, uint8_t, bRm);
2698
2699
2700/** Opcode 0x0f 0x72. */
2701FNIEMOP_DEF(iemOp_Grp13)
2702{
2703 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2704 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2705 return IEMOP_RAISE_INVALID_OPCODE();
2706 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
2707 {
2708 case 0: case 1: case 3: case 5: case 7:
2709 return IEMOP_RAISE_INVALID_OPCODE();
2710 case 2:
2711 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2712 {
2713 case 0: return FNIEMOP_CALL_1(iemOp_Grp13_psrld_Nq_Ib, bRm);
2714 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_psrld_Udq_Ib, bRm);
2715 default: return IEMOP_RAISE_INVALID_OPCODE();
2716 }
2717 case 4:
2718 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2719 {
2720 case 0: return FNIEMOP_CALL_1(iemOp_Grp13_psrad_Nq_Ib, bRm);
2721 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_psrad_Udq_Ib, bRm);
2722 default: return IEMOP_RAISE_INVALID_OPCODE();
2723 }
2724 case 6:
2725 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2726 {
2727 case 0: return FNIEMOP_CALL_1(iemOp_Grp13_pslld_Nq_Ib, bRm);
2728 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_pslld_Udq_Ib, bRm);
2729 default: return IEMOP_RAISE_INVALID_OPCODE();
2730 }
2731 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2732 }
2733}
2734
2735
/* Stubs for the group-14 immediate qword/double-qword shift register forms.
   The ones tagged NEXT are the next candidates for implementation. */
/** Opcode 0x0f 0x73 11/2. */
FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x73 11/2. */
FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Udq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x73 11/3. */
FNIEMOP_STUB_1(iemOp_Grp14_psrldq_Udq_Ib, uint8_t, bRm); //NEXT

/** Opcode 0x0f 0x73 11/6. */
FNIEMOP_STUB_1(iemOp_Grp14_psllq_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x73 11/6. */
FNIEMOP_STUB_1(iemOp_Grp14_psllq_Udq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x73 11/7. */
FNIEMOP_STUB_1(iemOp_Grp14_pslldq_Udq_Ib, uint8_t, bRm); //NEXT
2753
2754
2755/** Opcode 0x0f 0x73. */
2756FNIEMOP_DEF(iemOp_Grp14)
2757{
2758 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2759 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2760 return IEMOP_RAISE_INVALID_OPCODE();
2761 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
2762 {
2763 case 0: case 1: case 4: case 5:
2764 return IEMOP_RAISE_INVALID_OPCODE();
2765 case 2:
2766 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2767 {
2768 case 0: return FNIEMOP_CALL_1(iemOp_Grp14_psrlq_Nq_Ib, bRm);
2769 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psrlq_Udq_Ib, bRm);
2770 default: return IEMOP_RAISE_INVALID_OPCODE();
2771 }
2772 case 3:
2773 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2774 {
2775 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psrldq_Udq_Ib, bRm);
2776 default: return IEMOP_RAISE_INVALID_OPCODE();
2777 }
2778 case 6:
2779 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2780 {
2781 case 0: return FNIEMOP_CALL_1(iemOp_Grp14_psllq_Nq_Ib, bRm);
2782 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psllq_Udq_Ib, bRm);
2783 default: return IEMOP_RAISE_INVALID_OPCODE();
2784 }
2785 case 7:
2786 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2787 {
2788 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_pslldq_Udq_Ib, bRm);
2789 default: return IEMOP_RAISE_INVALID_OPCODE();
2790 }
2791 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2792 }
2793}
2794
2795
/**
 * Common worker for SSE2 and MMX instructions on the forms:
 *      pxxx    mm1, mm2/mem64
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SSE2 and MMX cpuid checks.
 */
FNIEMOP_DEF_1(iemOpCommonMmxSse2_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* The mandatory prefix (none = MMX, 0x66 = SSE) selects the implementation. */
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE */
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(2, 0);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_ARG(uint128_t const *, pSrc, 1);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_LOCAL(uint128_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                /* 128-bit load with alignment check enforced. */
                IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);

                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
                /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(2, 0);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_ARG(uint64_t const *, pSrc, 1);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
                IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_LOCAL(uint64_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_FETCH_MEM_U64(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);

                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default: /* 0xf2 / 0xf3 prefixes - no such encodings. */
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
2897
2898
/** Opcode 0x0f 0x74. */
FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq__pcmpeqb_Vdq_Wdq)
{
    IEMOP_MNEMONIC("pcmpeqb");
    /* Packed byte equality compare; common worker handles both MMX and SSE2 forms. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
}
2905
2906
/** Opcode 0x0f 0x75. */
FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq__pcmpeqw_Vdq_Wdq)
{
    IEMOP_MNEMONIC("pcmpeqw");
    /* Packed word equality compare; common worker handles both MMX and SSE2 forms. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
}
2913
2914
/** Opcode 0x0f 0x76.
 * NOTE(review): the function name has a typo ('pcmped' for 'pcmpeqd'); fixing
 * it requires touching the one-/two-byte opcode tables that reference it. */
FNIEMOP_DEF(iemOp_pcmped_Pq_Qq__pcmpeqd_Vdq_Wdq)
{
    IEMOP_MNEMONIC("pcmpeqd");
    /* Packed dword equality compare; common worker handles both MMX and SSE2 forms. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
}
2921
2922
/** Opcode 0x0f 0x77. */
FNIEMOP_STUB(iemOp_emms);
/* The UD stubs below presumably raise #UD (FNIEMOP_UD_STUB is defined elsewhere). */
/** Opcode 0x0f 0x78. */
FNIEMOP_UD_STUB(iemOp_vmread_AmdGrp17);
/** Opcode 0x0f 0x79. */
FNIEMOP_UD_STUB(iemOp_vmwrite);
/** Opcode 0x0f 0x7c. */
FNIEMOP_STUB(iemOp_haddpd_Vdp_Wpd__haddps_Vps_Wps);
/** Opcode 0x0f 0x7d. */
FNIEMOP_STUB(iemOp_hsubpd_Vpd_Wpd__hsubps_Vps_Wps);
2933
2934
2935/** Opcode 0x0f 0x7e. */
FNIEMOP_DEF(iemOp_movd_q_Ey_Pd__movd_q_Ey_Vy__movq_Vq_Wq)
{
    /*
     * Opcode 0x0f 0x7e: store the low dword/qword of an MMX or XMM register
     * to a general-purpose register or to memory.  The mandatory prefix picks
     * the register file: 66h -> SSE (XMM source), no prefix -> MMX source.
     * REX.W selects a 64-bit (movq) vs 32-bit (movd) transfer.  The F3h form
     * (movq Vq,Wq) is not handled here and lands in the default \#UD case.
     * NOTE(review): mnemonic text says "Wd/q" but the SSE source operand is a
     *               register/Vy form here -- harmless debug-string nit.
     */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE */
            IEMOP_MNEMONIC("movd/q Ed/q,Wd/q");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* greg, XMM */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    /* REX.W: full 64-bit low qword of the XMM register. */
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                    IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Tmp);
                }
                else
                {
                    /* No REX.W: low dword only. */
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                    IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* [mem], XMM */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                    IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffSrc, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                    IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrEffSrc, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            IEMOP_MNEMONIC("movq/d Ed/q,Pd/q");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* greg, MMX */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                    IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                    IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* [mem], MMX */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                    IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffSrc, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                    IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrEffSrc, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            /* Other prefix combinations (incl. F3h movq Vq,Wq): not implemented. */
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
3041
3042
3043/** Opcode 0x0f 0x7f. */
FNIEMOP_DEF(iemOp_movq_Qq_Pq__movq_movdqa_Wdq_Vdq__movdqu_Wdq_Vdq)
{
    /*
     * Opcode 0x0f 0x7f: store an MMX/XMM register to register or memory.
     * 66h prefix -> movdqa (aligned 128-bit store), F3h -> movdqu (unaligned),
     * no prefix -> movq Qq,Pq (MMX).  Other prefix combinations raise \#UD.
     */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    bool fAligned = false;
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE aligned */
            fAligned = true;
            /* fall thru - aligned and unaligned share the code below. */
        case IEM_OP_PRF_REPZ: /* SSE unaligned */
            if (fAligned)
                IEMOP_MNEMONIC("movdqa Wdq,Vdq");
            else
                IEMOP_MNEMONIC("movdqu Wdq,Vdq");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint128_t, u128Tmp);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_XREG_U128((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u128Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint128_t, u128Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                /* movdqa enforces 16-byte alignment on the memory operand. */
                if (fAligned)
                    IEM_MC_STORE_MEM_U128_ALIGN_SSE(pIemCpu->iEffSeg, GCPtrEffSrc, u128Tmp);
                else
                    IEM_MC_STORE_MEM_U128(pIemCpu->iEffSeg, GCPtrEffSrc, u128Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            IEMOP_MNEMONIC("movq Qq,Pq");

            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
                /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_STORE_MREG_U64(bRm & X86_MODRM_RM_MASK, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffSrc, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
3137
3138
3139
3140/** Opcode 0x0f 0x80. */
FNIEMOP_DEF(iemOp_jo_Jv)
{
    /* jo rel16/32: jump near if overflow (OF=1).  386+; default 64-bit
       operand size in long mode. */
    IEMOP_MNEMONIC("jo Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3174
3175
3176/** Opcode 0x0f 0x81. */
FNIEMOP_DEF(iemOp_jno_Jv)
{
    /* jno rel16/32: jump near if not overflow (OF=0). */
    IEMOP_MNEMONIC("jno Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        /* Inverted sense: OF set -> fall through, OF clear -> take the jump. */
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3210
3211
3212/** Opcode 0x0f 0x82. */
FNIEMOP_DEF(iemOp_jc_Jv)
{
    /* jc/jb/jnae rel16/32: jump near if carry (CF=1). */
    IEMOP_MNEMONIC("jc/jb/jnae Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3246
3247
3248/** Opcode 0x0f 0x83. */
FNIEMOP_DEF(iemOp_jnc_Jv)
{
    /* jnc/jnb/jae rel16/32: jump near if not carry (CF=0). */
    IEMOP_MNEMONIC("jnc/jnb/jae Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        /* Inverted sense: CF set -> fall through, CF clear -> take the jump. */
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3282
3283
3284/** Opcode 0x0f 0x84. */
FNIEMOP_DEF(iemOp_je_Jv)
{
    /* je/jz rel16/32: jump near if equal/zero (ZF=1). */
    IEMOP_MNEMONIC("je/jz Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3318
3319
3320/** Opcode 0x0f 0x85. */
FNIEMOP_DEF(iemOp_jne_Jv)
{
    /* jne/jnz rel16/32: jump near if not equal/not zero (ZF=0). */
    IEMOP_MNEMONIC("jne/jnz Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        /* Inverted sense: ZF set -> fall through, ZF clear -> take the jump. */
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3354
3355
3356/** Opcode 0x0f 0x86. */
FNIEMOP_DEF(iemOp_jbe_Jv)
{
    /* jbe/jna rel16/32: jump near if below or equal (CF=1 or ZF=1). */
    IEMOP_MNEMONIC("jbe/jna Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3390
3391
3392/** Opcode 0x0f 0x87. */
FNIEMOP_DEF(iemOp_jnbe_Jv)
{
    /* jnbe/ja rel16/32: jump near if above (CF=0 and ZF=0). */
    IEMOP_MNEMONIC("jnbe/ja Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        /* Inverted sense: any of CF/ZF set -> fall through, else jump. */
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3426
3427
3428/** Opcode 0x0f 0x88. */
FNIEMOP_DEF(iemOp_js_Jv)
{
    /* js rel16/32: jump near if sign (SF=1). */
    IEMOP_MNEMONIC("js Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3462
3463
3464/** Opcode 0x0f 0x89. */
FNIEMOP_DEF(iemOp_jns_Jv)
{
    /* jns rel16/32: jump near if not sign (SF=0). */
    IEMOP_MNEMONIC("jns Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        /* Inverted sense: SF set -> fall through, SF clear -> take the jump. */
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3498
3499
3500/** Opcode 0x0f 0x8a. */
FNIEMOP_DEF(iemOp_jp_Jv)
{
    /* jp/jpe rel16/32: jump near if parity even (PF=1). */
    IEMOP_MNEMONIC("jp Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3534
3535
3536/** Opcode 0x0f 0x8b. */
3537FNIEMOP_DEF(iemOp_jnp_Jv)
3538{
3539 IEMOP_MNEMONIC("jo Jv");
3540 IEMOP_HLP_MIN_386();
3541 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3542 if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
3543 {
3544 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3545 IEMOP_HLP_NO_LOCK_PREFIX();
3546
3547 IEM_MC_BEGIN(0, 0);
3548 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3549 IEM_MC_ADVANCE_RIP();
3550 } IEM_MC_ELSE() {
3551 IEM_MC_REL_JMP_S16(i16Imm);
3552 } IEM_MC_ENDIF();
3553 IEM_MC_END();
3554 }
3555 else
3556 {
3557 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3558 IEMOP_HLP_NO_LOCK_PREFIX();
3559
3560 IEM_MC_BEGIN(0, 0);
3561 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3562 IEM_MC_ADVANCE_RIP();
3563 } IEM_MC_ELSE() {
3564 IEM_MC_REL_JMP_S32(i32Imm);
3565 } IEM_MC_ENDIF();
3566 IEM_MC_END();
3567 }
3568 return VINF_SUCCESS;
3569}
3570
3571
3572/** Opcode 0x0f 0x8c. */
FNIEMOP_DEF(iemOp_jl_Jv)
{
    /* jl/jnge rel16/32: jump near if less (SF != OF). */
    IEMOP_MNEMONIC("jl/jnge Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3606
3607
3608/** Opcode 0x0f 0x8d. */
FNIEMOP_DEF(iemOp_jnl_Jv)
{
    /* jnl/jge rel16/32: jump near if not less (SF == OF). */
    IEMOP_MNEMONIC("jnl/jge Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        /* Inverted sense: SF != OF -> fall through, SF == OF -> jump. */
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3642
3643
3644/** Opcode 0x0f 0x8e. */
FNIEMOP_DEF(iemOp_jle_Jv)
{
    /* jle/jng rel16/32: jump near if less or equal (ZF=1 or SF != OF). */
    IEMOP_MNEMONIC("jle/jng Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3678
3679
3680/** Opcode 0x0f 0x8f. */
FNIEMOP_DEF(iemOp_jnle_Jv)
{
    /* jnle/jg rel16/32: jump near if greater (ZF=0 and SF == OF). */
    IEMOP_MNEMONIC("jnle/jg Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        /* Inverted sense: (ZF or SF!=OF) -> fall through, else jump. */
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3714
3715
3716/** Opcode 0x0f 0x90. */
FNIEMOP_DEF(iemOp_seto_Eb)
{
    /* seto r/m8: set byte to 1 if overflow (OF=1), else to 0. */
    IEMOP_MNEMONIC("seto Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3755
3756
3757/** Opcode 0x0f 0x91. */
FNIEMOP_DEF(iemOp_setno_Eb)
{
    /* setno r/m8: set byte to 1 if not overflow (OF=0), else to 0. */
    IEMOP_MNEMONIC("setno Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        /* Inverted sense relative to seto: OF set -> 0, OF clear -> 1. */
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3796
3797
3798/** Opcode 0x0f 0x92. */
FNIEMOP_DEF(iemOp_setc_Eb)
{
    /* setc/setb/setnae r/m8: set byte to 1 if carry (CF=1), else to 0. */
    IEMOP_MNEMONIC("setc Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3837
3838
3839/** Opcode 0x0f 0x93. */
FNIEMOP_DEF(iemOp_setnc_Eb)
{
    /* setnc/setnb/setae r/m8: set byte to 1 if not carry (CF=0), else to 0. */
    IEMOP_MNEMONIC("setnc Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        /* Inverted sense relative to setc: CF set -> 0, CF clear -> 1. */
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3878
3879
3880/** Opcode 0x0f 0x94. */
FNIEMOP_DEF(iemOp_sete_Eb)
{
    /* sete/setz r/m8: set byte to 1 if equal/zero (ZF=1), else to 0. */
    IEMOP_MNEMONIC("sete Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3919
3920
3921/** Opcode 0x0f 0x95. */
FNIEMOP_DEF(iemOp_setne_Eb)
{
    /* setne/setnz r/m8: set byte to 1 if not equal/not zero (ZF=0), else to 0. */
    IEMOP_MNEMONIC("setne Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        /* Inverted sense relative to sete: ZF set -> 0, ZF clear -> 1. */
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3960
3961
3962/** Opcode 0x0f 0x96. */
FNIEMOP_DEF(iemOp_setbe_Eb)
{
    /* setbe/setna r/m8: set byte to 1 if below or equal (CF=1 or ZF=1), else 0. */
    IEMOP_MNEMONIC("setbe Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4001
4002
4003/** Opcode 0x0f 0x97. */
FNIEMOP_DEF(iemOp_setnbe_Eb)
{
    /* setnbe/seta r/m8: set byte to 1 if above (CF=0 and ZF=0), else to 0. */
    IEMOP_MNEMONIC("setnbe Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        /* Inverted sense relative to setbe: any of CF/ZF set -> 0, else 1. */
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4042
4043
4044/** Opcode 0x0f 0x98. */
FNIEMOP_DEF(iemOp_sets_Eb)
{
    /* sets r/m8: set byte to 1 if sign (SF=1), else to 0. */
    IEMOP_MNEMONIC("sets Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4083
4084
4085/** Opcode 0x0f 0x99. */
FNIEMOP_DEF(iemOp_setns_Eb)
{
    /* setns r/m8: set byte to 1 if not sign (SF=0), else to 0. */
    IEMOP_MNEMONIC("setns Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        /* Inverted sense relative to sets: SF set -> 0, SF clear -> 1. */
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4124
4125
4126/** Opcode 0x0f 0x9a. */
4127FNIEMOP_DEF(iemOp_setp_Eb)
4128{
4129 IEMOP_MNEMONIC("setnp Eb");
4130 IEMOP_HLP_MIN_386();
4131 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4132 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */
4133
4134 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4135 * any way. AMD says it's "unused", whatever that means. We're
4136 * ignoring for now. */
4137 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4138 {
4139 /* register target */
4140 IEM_MC_BEGIN(0, 0);
4141 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4142 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
4143 } IEM_MC_ELSE() {
4144 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
4145 } IEM_MC_ENDIF();
4146 IEM_MC_ADVANCE_RIP();
4147 IEM_MC_END();
4148 }
4149 else
4150 {
4151 /* memory target */
4152 IEM_MC_BEGIN(0, 1);
4153 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4154 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4155 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4156 IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
4157 } IEM_MC_ELSE() {
4158 IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
4159 } IEM_MC_ENDIF();
4160 IEM_MC_ADVANCE_RIP();
4161 IEM_MC_END();
4162 }
4163 return VINF_SUCCESS;
4164}
4165
4166
/**
 * Opcode 0x0f 0x9b - SETNP Eb (386+).
 *
 * Sets the byte-sized r/m destination to 1 when EFLAGS.PF is clear,
 * otherwise to 0.  The ModR/M 'reg' field is ignored (see todo below).
 */
FNIEMOP_DEF(iemOp_setnp_Eb)
{
    IEMOP_MNEMONIC("setnp Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way.  AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target: PF set -> 0, PF clear -> 1. */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target: same condition, byte store through the effective segment. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4206
4207
/**
 * Opcode 0x0f 0x9c - SETL Eb (386+).
 *
 * Sets the byte-sized r/m destination to 1 when EFLAGS.SF != EFLAGS.OF
 * (signed less-than), otherwise to 0.  The ModR/M 'reg' field is ignored.
 */
FNIEMOP_DEF(iemOp_setl_Eb)
{
    IEMOP_MNEMONIC("setl Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way.  AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target: SF != OF -> 1, else 0. */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target: same condition, byte store through the effective segment. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4247
4248
/**
 * Opcode 0x0f 0x9d - SETNL/SETGE Eb (386+).
 *
 * Sets the byte-sized r/m destination to 1 when EFLAGS.SF == EFLAGS.OF
 * (signed greater-or-equal), otherwise to 0.  The ModR/M 'reg' field is
 * ignored.
 */
FNIEMOP_DEF(iemOp_setnl_Eb)
{
    IEMOP_MNEMONIC("setnl Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way.  AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target: SF != OF -> 0, SF == OF -> 1. */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target: same condition, byte store through the effective segment. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4288
4289
/**
 * Opcode 0x0f 0x9e - SETLE Eb (386+).
 *
 * Sets the byte-sized r/m destination to 1 when EFLAGS.ZF is set or
 * EFLAGS.SF != EFLAGS.OF (signed less-or-equal), otherwise to 0.  The
 * ModR/M 'reg' field is ignored.
 */
FNIEMOP_DEF(iemOp_setle_Eb)
{
    IEMOP_MNEMONIC("setle Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way.  AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target: ZF || (SF != OF) -> 1, else 0. */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target: same condition, byte store through the effective segment. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4329
4330
/**
 * Opcode 0x0f 0x9f - SETNLE/SETG Eb (386+).
 *
 * Sets the byte-sized r/m destination to 1 when EFLAGS.ZF is clear and
 * EFLAGS.SF == EFLAGS.OF (signed greater-than), otherwise to 0.  The
 * ModR/M 'reg' field is ignored.
 */
FNIEMOP_DEF(iemOp_setnle_Eb)
{
    IEMOP_MNEMONIC("setnle Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way.  AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target: ZF || (SF != OF) -> 0, else 1 (inverse of SETLE). */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target: same condition, byte store through the effective segment. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4370
4371
4372/**
4373 * Common 'push segment-register' helper.
4374 */
4375FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
4376{
4377 IEMOP_HLP_NO_LOCK_PREFIX();
4378 if (iReg < X86_SREG_FS)
4379 IEMOP_HLP_NO_64BIT();
4380 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4381
4382 switch (pIemCpu->enmEffOpSize)
4383 {
4384 case IEMMODE_16BIT:
4385 IEM_MC_BEGIN(0, 1);
4386 IEM_MC_LOCAL(uint16_t, u16Value);
4387 IEM_MC_FETCH_SREG_U16(u16Value, iReg);
4388 IEM_MC_PUSH_U16(u16Value);
4389 IEM_MC_ADVANCE_RIP();
4390 IEM_MC_END();
4391 break;
4392
4393 case IEMMODE_32BIT:
4394 IEM_MC_BEGIN(0, 1);
4395 IEM_MC_LOCAL(uint32_t, u32Value);
4396 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
4397 IEM_MC_PUSH_U32_SREG(u32Value);
4398 IEM_MC_ADVANCE_RIP();
4399 IEM_MC_END();
4400 break;
4401
4402 case IEMMODE_64BIT:
4403 IEM_MC_BEGIN(0, 1);
4404 IEM_MC_LOCAL(uint64_t, u64Value);
4405 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
4406 IEM_MC_PUSH_U64(u64Value);
4407 IEM_MC_ADVANCE_RIP();
4408 IEM_MC_END();
4409 break;
4410 }
4411
4412 return VINF_SUCCESS;
4413}
4414
4415
/** Opcode 0x0f 0xa0 - PUSH FS (386+).
 * Note: the lock-prefix check is repeated inside iemOpCommonPushSReg;
 * harmless redundancy. */
FNIEMOP_DEF(iemOp_push_fs)
{
    IEMOP_MNEMONIC("push fs");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_NO_LOCK_PREFIX();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
}
4424
4425
/** Opcode 0x0f 0xa1 - POP FS (386+).
 * Defers to the C implementation since segment loading involves descriptor
 * table access and fault checks. */
FNIEMOP_DEF(iemOp_pop_fs)
{
    IEMOP_MNEMONIC("pop fs");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pIemCpu->enmEffOpSize);
}
4434
4435
/** Opcode 0x0f 0xa2 - CPUID.
 * Deferred to the C implementation; the leaf dispatch is too involved for
 * the MC DSL. */
FNIEMOP_DEF(iemOp_cpuid)
{
    IEMOP_MNEMONIC("cpuid");
    IEMOP_HLP_MIN_486(); /* not all 486es. */
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
}
4444
4445
4446/**
4447 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
4448 * iemOp_bts_Ev_Gv.
4449 */
4450FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
4451{
4452 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4453 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
4454
4455 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4456 {
4457 /* register destination. */
4458 IEMOP_HLP_NO_LOCK_PREFIX();
4459 switch (pIemCpu->enmEffOpSize)
4460 {
4461 case IEMMODE_16BIT:
4462 IEM_MC_BEGIN(3, 0);
4463 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
4464 IEM_MC_ARG(uint16_t, u16Src, 1);
4465 IEM_MC_ARG(uint32_t *, pEFlags, 2);
4466
4467 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4468 IEM_MC_AND_LOCAL_U16(u16Src, 0xf);
4469 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
4470 IEM_MC_REF_EFLAGS(pEFlags);
4471 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
4472
4473 IEM_MC_ADVANCE_RIP();
4474 IEM_MC_END();
4475 return VINF_SUCCESS;
4476
4477 case IEMMODE_32BIT:
4478 IEM_MC_BEGIN(3, 0);
4479 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
4480 IEM_MC_ARG(uint32_t, u32Src, 1);
4481 IEM_MC_ARG(uint32_t *, pEFlags, 2);
4482
4483 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4484 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
4485 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
4486 IEM_MC_REF_EFLAGS(pEFlags);
4487 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
4488
4489 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
4490 IEM_MC_ADVANCE_RIP();
4491 IEM_MC_END();
4492 return VINF_SUCCESS;
4493
4494 case IEMMODE_64BIT:
4495 IEM_MC_BEGIN(3, 0);
4496 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
4497 IEM_MC_ARG(uint64_t, u64Src, 1);
4498 IEM_MC_ARG(uint32_t *, pEFlags, 2);
4499
4500 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4501 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
4502 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
4503 IEM_MC_REF_EFLAGS(pEFlags);
4504 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
4505
4506 IEM_MC_ADVANCE_RIP();
4507 IEM_MC_END();
4508 return VINF_SUCCESS;
4509
4510 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4511 }
4512 }
4513 else
4514 {
4515 /* memory destination. */
4516
4517 uint32_t fAccess;
4518 if (pImpl->pfnLockedU16)
4519 fAccess = IEM_ACCESS_DATA_RW;
4520 else /* BT */
4521 {
4522 IEMOP_HLP_NO_LOCK_PREFIX();
4523 fAccess = IEM_ACCESS_DATA_R;
4524 }
4525
4526 NOREF(fAccess);
4527
4528 /** @todo test negative bit offsets! */
4529 switch (pIemCpu->enmEffOpSize)
4530 {
4531 case IEMMODE_16BIT:
4532 IEM_MC_BEGIN(3, 2);
4533 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
4534 IEM_MC_ARG(uint16_t, u16Src, 1);
4535 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
4536 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4537 IEM_MC_LOCAL(int16_t, i16AddrAdj);
4538
4539 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4540 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4541 IEM_MC_ASSIGN(i16AddrAdj, u16Src);
4542 IEM_MC_AND_ARG_U16(u16Src, 0x0f);
4543 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
4544 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 1);
4545 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
4546 IEM_MC_FETCH_EFLAGS(EFlags);
4547
4548 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
4549 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
4550 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
4551 else
4552 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
4553 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
4554
4555 IEM_MC_COMMIT_EFLAGS(EFlags);
4556 IEM_MC_ADVANCE_RIP();
4557 IEM_MC_END();
4558 return VINF_SUCCESS;
4559
4560 case IEMMODE_32BIT:
4561 IEM_MC_BEGIN(3, 2);
4562 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
4563 IEM_MC_ARG(uint32_t, u32Src, 1);
4564 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
4565 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4566 IEM_MC_LOCAL(int32_t, i32AddrAdj);
4567
4568 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4569 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4570 IEM_MC_ASSIGN(i32AddrAdj, u32Src);
4571 IEM_MC_AND_ARG_U32(u32Src, 0x1f);
4572 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
4573 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
4574 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
4575 IEM_MC_FETCH_EFLAGS(EFlags);
4576
4577 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
4578 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
4579 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
4580 else
4581 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
4582 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
4583
4584 IEM_MC_COMMIT_EFLAGS(EFlags);
4585 IEM_MC_ADVANCE_RIP();
4586 IEM_MC_END();
4587 return VINF_SUCCESS;
4588
4589 case IEMMODE_64BIT:
4590 IEM_MC_BEGIN(3, 2);
4591 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
4592 IEM_MC_ARG(uint64_t, u64Src, 1);
4593 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
4594 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4595 IEM_MC_LOCAL(int64_t, i64AddrAdj);
4596
4597 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4598 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4599 IEM_MC_ASSIGN(i64AddrAdj, u64Src);
4600 IEM_MC_AND_ARG_U64(u64Src, 0x3f);
4601 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
4602 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
4603 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
4604 IEM_MC_FETCH_EFLAGS(EFlags);
4605
4606 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
4607 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
4608 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
4609 else
4610 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
4611 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
4612
4613 IEM_MC_COMMIT_EFLAGS(EFlags);
4614 IEM_MC_ADVANCE_RIP();
4615 IEM_MC_END();
4616 return VINF_SUCCESS;
4617
4618 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4619 }
4620 }
4621}
4622
4623
4624/** Opcode 0x0f 0xa3. */
4625FNIEMOP_DEF(iemOp_bt_Ev_Gv)
4626{
4627 IEMOP_MNEMONIC("bt Gv,Gv");
4628 IEMOP_HLP_MIN_386();
4629 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
4630}
4631
4632
4633/**
4634 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
4635 */
4636FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
4637{
4638 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4639 IEMOP_HLP_NO_LOCK_PREFIX();
4640 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
4641
4642 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4643 {
4644 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
4645 IEMOP_HLP_NO_LOCK_PREFIX();
4646
4647 switch (pIemCpu->enmEffOpSize)
4648 {
4649 case IEMMODE_16BIT:
4650 IEM_MC_BEGIN(4, 0);
4651 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
4652 IEM_MC_ARG(uint16_t, u16Src, 1);
4653 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
4654 IEM_MC_ARG(uint32_t *, pEFlags, 3);
4655
4656 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4657 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
4658 IEM_MC_REF_EFLAGS(pEFlags);
4659 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
4660
4661 IEM_MC_ADVANCE_RIP();
4662 IEM_MC_END();
4663 return VINF_SUCCESS;
4664
4665 case IEMMODE_32BIT:
4666 IEM_MC_BEGIN(4, 0);
4667 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
4668 IEM_MC_ARG(uint32_t, u32Src, 1);
4669 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
4670 IEM_MC_ARG(uint32_t *, pEFlags, 3);
4671
4672 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4673 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
4674 IEM_MC_REF_EFLAGS(pEFlags);
4675 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
4676
4677 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
4678 IEM_MC_ADVANCE_RIP();
4679 IEM_MC_END();
4680 return VINF_SUCCESS;
4681
4682 case IEMMODE_64BIT:
4683 IEM_MC_BEGIN(4, 0);
4684 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
4685 IEM_MC_ARG(uint64_t, u64Src, 1);
4686 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
4687 IEM_MC_ARG(uint32_t *, pEFlags, 3);
4688
4689 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4690 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
4691 IEM_MC_REF_EFLAGS(pEFlags);
4692 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
4693
4694 IEM_MC_ADVANCE_RIP();
4695 IEM_MC_END();
4696 return VINF_SUCCESS;
4697
4698 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4699 }
4700 }
4701 else
4702 {
4703 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */
4704
4705 switch (pIemCpu->enmEffOpSize)
4706 {
4707 case IEMMODE_16BIT:
4708 IEM_MC_BEGIN(4, 2);
4709 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
4710 IEM_MC_ARG(uint16_t, u16Src, 1);
4711 IEM_MC_ARG(uint8_t, cShiftArg, 2);
4712 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
4713 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4714
4715 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
4716 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
4717 IEM_MC_ASSIGN(cShiftArg, cShift);
4718 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4719 IEM_MC_FETCH_EFLAGS(EFlags);
4720 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
4721 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
4722
4723 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
4724 IEM_MC_COMMIT_EFLAGS(EFlags);
4725 IEM_MC_ADVANCE_RIP();
4726 IEM_MC_END();
4727 return VINF_SUCCESS;
4728
4729 case IEMMODE_32BIT:
4730 IEM_MC_BEGIN(4, 2);
4731 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
4732 IEM_MC_ARG(uint32_t, u32Src, 1);
4733 IEM_MC_ARG(uint8_t, cShiftArg, 2);
4734 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
4735 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4736
4737 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
4738 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
4739 IEM_MC_ASSIGN(cShiftArg, cShift);
4740 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4741 IEM_MC_FETCH_EFLAGS(EFlags);
4742 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
4743 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
4744
4745 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
4746 IEM_MC_COMMIT_EFLAGS(EFlags);
4747 IEM_MC_ADVANCE_RIP();
4748 IEM_MC_END();
4749 return VINF_SUCCESS;
4750
4751 case IEMMODE_64BIT:
4752 IEM_MC_BEGIN(4, 2);
4753 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
4754 IEM_MC_ARG(uint64_t, u64Src, 1);
4755 IEM_MC_ARG(uint8_t, cShiftArg, 2);
4756 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
4757 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4758
4759 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
4760 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
4761 IEM_MC_ASSIGN(cShiftArg, cShift);
4762 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4763 IEM_MC_FETCH_EFLAGS(EFlags);
4764 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
4765 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
4766
4767 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
4768 IEM_MC_COMMIT_EFLAGS(EFlags);
4769 IEM_MC_ADVANCE_RIP();
4770 IEM_MC_END();
4771 return VINF_SUCCESS;
4772
4773 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4774 }
4775 }
4776}
4777
4778
4779/**
4780 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
4781 */
4782FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
4783{
4784 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4785 IEMOP_HLP_NO_LOCK_PREFIX();
4786 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
4787
4788 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4789 {
4790 IEMOP_HLP_NO_LOCK_PREFIX();
4791
4792 switch (pIemCpu->enmEffOpSize)
4793 {
4794 case IEMMODE_16BIT:
4795 IEM_MC_BEGIN(4, 0);
4796 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
4797 IEM_MC_ARG(uint16_t, u16Src, 1);
4798 IEM_MC_ARG(uint8_t, cShiftArg, 2);
4799 IEM_MC_ARG(uint32_t *, pEFlags, 3);
4800
4801 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4802 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
4803 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
4804 IEM_MC_REF_EFLAGS(pEFlags);
4805 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
4806
4807 IEM_MC_ADVANCE_RIP();
4808 IEM_MC_END();
4809 return VINF_SUCCESS;
4810
4811 case IEMMODE_32BIT:
4812 IEM_MC_BEGIN(4, 0);
4813 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
4814 IEM_MC_ARG(uint32_t, u32Src, 1);
4815 IEM_MC_ARG(uint8_t, cShiftArg, 2);
4816 IEM_MC_ARG(uint32_t *, pEFlags, 3);
4817
4818 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4819 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
4820 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
4821 IEM_MC_REF_EFLAGS(pEFlags);
4822 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
4823
4824 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
4825 IEM_MC_ADVANCE_RIP();
4826 IEM_MC_END();
4827 return VINF_SUCCESS;
4828
4829 case IEMMODE_64BIT:
4830 IEM_MC_BEGIN(4, 0);
4831 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
4832 IEM_MC_ARG(uint64_t, u64Src, 1);
4833 IEM_MC_ARG(uint8_t, cShiftArg, 2);
4834 IEM_MC_ARG(uint32_t *, pEFlags, 3);
4835
4836 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4837 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
4838 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
4839 IEM_MC_REF_EFLAGS(pEFlags);
4840 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
4841
4842 IEM_MC_ADVANCE_RIP();
4843 IEM_MC_END();
4844 return VINF_SUCCESS;
4845
4846 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4847 }
4848 }
4849 else
4850 {
4851 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */
4852
4853 switch (pIemCpu->enmEffOpSize)
4854 {
4855 case IEMMODE_16BIT:
4856 IEM_MC_BEGIN(4, 2);
4857 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
4858 IEM_MC_ARG(uint16_t, u16Src, 1);
4859 IEM_MC_ARG(uint8_t, cShiftArg, 2);
4860 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
4861 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4862
4863 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4864 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4865 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
4866 IEM_MC_FETCH_EFLAGS(EFlags);
4867 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
4868 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
4869
4870 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
4871 IEM_MC_COMMIT_EFLAGS(EFlags);
4872 IEM_MC_ADVANCE_RIP();
4873 IEM_MC_END();
4874 return VINF_SUCCESS;
4875
4876 case IEMMODE_32BIT:
4877 IEM_MC_BEGIN(4, 2);
4878 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
4879 IEM_MC_ARG(uint32_t, u32Src, 1);
4880 IEM_MC_ARG(uint8_t, cShiftArg, 2);
4881 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
4882 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4883
4884 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4885 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4886 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
4887 IEM_MC_FETCH_EFLAGS(EFlags);
4888 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
4889 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
4890
4891 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
4892 IEM_MC_COMMIT_EFLAGS(EFlags);
4893 IEM_MC_ADVANCE_RIP();
4894 IEM_MC_END();
4895 return VINF_SUCCESS;
4896
4897 case IEMMODE_64BIT:
4898 IEM_MC_BEGIN(4, 2);
4899 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
4900 IEM_MC_ARG(uint64_t, u64Src, 1);
4901 IEM_MC_ARG(uint8_t, cShiftArg, 2);
4902 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
4903 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4904
4905 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4906 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4907 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
4908 IEM_MC_FETCH_EFLAGS(EFlags);
4909 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
4910 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
4911
4912 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
4913 IEM_MC_COMMIT_EFLAGS(EFlags);
4914 IEM_MC_ADVANCE_RIP();
4915 IEM_MC_END();
4916 return VINF_SUCCESS;
4917
4918 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4919 }
4920 }
4921}
4922
4923
4924
/** Opcode 0x0f 0xa4 - SHLD Ev,Gv,Ib (386+). */
FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
{
    IEMOP_MNEMONIC("shld Ev,Gv,Ib");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shld);
}
4932
4933
/** Opcode 0x0f 0xa5 - SHLD Ev,Gv,CL (386+). */
FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
{
    IEMOP_MNEMONIC("shld Ev,Gv,CL");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shld);
}
4941
4942
/** Opcode 0x0f 0xa8 - PUSH GS (386+).
 * Note: the lock-prefix check is repeated inside iemOpCommonPushSReg;
 * harmless redundancy. */
FNIEMOP_DEF(iemOp_push_gs)
{
    IEMOP_MNEMONIC("push gs");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_NO_LOCK_PREFIX();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
}
4951
4952
/** Opcode 0x0f 0xa9 - POP GS (386+).
 * Defers to the C implementation since segment loading involves descriptor
 * table access and fault checks. */
FNIEMOP_DEF(iemOp_pop_gs)
{
    IEMOP_MNEMONIC("pop gs");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pIemCpu->enmEffOpSize);
}
4961
4962
/** Opcode 0x0f 0xaa - RSM.  Not implemented yet (FNIEMOP_STUB presumably
 *  generates a failing stub - confirm against the macro definition). */
FNIEMOP_STUB(iemOp_rsm);
//IEMOP_HLP_MIN_386();
4966
4967
/** Opcode 0x0f 0xab - BTS Ev,Gv (386+). */
FNIEMOP_DEF(iemOp_bts_Ev_Gv)
{
    IEMOP_MNEMONIC("bts Ev,Gv");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
}
4975
4976
/** Opcode 0x0f 0xac - SHRD Ev,Gv,Ib (386+). */
FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
{
    IEMOP_MNEMONIC("shrd Ev,Gv,Ib");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shrd);
}
4984
4985
/** Opcode 0x0f 0xad - SHRD Ev,Gv,CL (386+). */
FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
{
    IEMOP_MNEMONIC("shrd Ev,Gv,CL");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shrd);
}
4993
4994
/** Opcode 0x0f 0xae mem/0 - FXSAVE m512.
 * Raises \#UD unless the guest CPU profile reports FXSAVE/FXRSTOR support;
 * the heavy lifting is deferred to iemCImpl_fxsave. */
FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fxsave m512");
    if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fFxSaveRstor)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t,         iEffSeg,                                 0);
    IEM_MC_ARG(RTGCPTR,         GCPtrEff,                                1);
    IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
    IEM_MC_END();
    return VINF_SUCCESS;
}
5013
5014
/** Opcode 0x0f 0xae mem/1 - FXRSTOR m512.
 * Raises \#UD unless the guest CPU profile reports FXSAVE/FXRSTOR support;
 * the heavy lifting is deferred to iemCImpl_fxrstor. */
FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fxrstor m512");
    if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fFxSaveRstor)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t,         iEffSeg,                                 0);
    IEM_MC_ARG(RTGCPTR,         GCPtrEff,                                1);
    IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
    IEM_MC_END();
    return VINF_SUCCESS;
}
5033
5034
/* Group 15 memory-form stubs: FNIEMOP_STUB_1 marks not-yet-implemented
 * handlers, FNIEMOP_UD_STUB_1 ones that raise #UD (semantics presumed from
 * the macro names - confirm against the macro definitions). */

/** Opcode 0x0f 0xae mem/2 - LDMXCSR (not implemented). */
FNIEMOP_STUB_1(iemOp_Grp15_ldmxcsr,  uint8_t, bRm);

/** Opcode 0x0f 0xae mem/3 - STMXCSR (not implemented). */
FNIEMOP_STUB_1(iemOp_Grp15_stmxcsr,  uint8_t, bRm);

/** Opcode 0x0f 0xae mem/4 - XSAVE (raises \#UD). */
FNIEMOP_UD_STUB_1(iemOp_Grp15_xsave,    uint8_t, bRm);

/** Opcode 0x0f 0xae mem/5 - XRSTOR (raises \#UD). */
FNIEMOP_UD_STUB_1(iemOp_Grp15_xrstor,   uint8_t, bRm);

/** Opcode 0x0f 0xae mem/6 - XSAVEOPT (raises \#UD). */
FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/7 - CLFLUSH (not implemented). */
FNIEMOP_STUB_1(iemOp_Grp15_clflush,  uint8_t, bRm);
5052
5053
/** Opcode 0x0f 0xae 11b/5 - LFENCE.
 * Raises \#UD unless the guest reports SSE2.  Emits a real LFENCE only when
 * the host also has SSE2, otherwise falls back to the generic memory-fence
 * helper. */
FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
{
    IEMOP_MNEMONIC("lfence");
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fSse2)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(0, 0);
    if (IEM_GET_HOST_CPU_FEATURES(pIemCpu)->fSse2)
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
5071
5072
/** Opcode 0x0f 0xae 11b/6 - MFENCE.
 * Raises \#UD unless the guest reports SSE2.  Emits a real MFENCE only when
 * the host also has SSE2, otherwise falls back to the generic memory-fence
 * helper. */
FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
{
    IEMOP_MNEMONIC("mfence");
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fSse2)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(0, 0);
    if (IEM_GET_HOST_CPU_FEATURES(pIemCpu)->fSse2)
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
5090
5091
/**
 * Opcode 0x0f 0xae 11b/7 - SFENCE.
 *
 * Raises \#UD unless the guest CPU profile reports SSE2; uses the generic
 * fence helper when the host itself lacks SSE2.
 * NOTE(review): architecturally SFENCE is SSE (not SSE2); the fSse2 gate here
 * is stricter than the spec -- confirm intent.
 */
FNIEMOP_DEF(iemOp_Grp15_sfence, uint8_t, bRm)
{
    IEMOP_MNEMONIC("sfence");
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fSse2)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(0, 0);
    if (IEM_GET_HOST_CPU_FEATURES(pIemCpu)->fSse2)
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
    else /* host without SSE2: fall back to a generic fence. */
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
5109
5110
/* FSGSBASE instructions (F3-prefixed group 15, register forms) - UD stubs
   (presumably raise \#UD; per macro name). */

/** Opcode 0xf3 0x0f 0xae 11b/0. RDFSBASE. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_rdfsbase, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xae 11b/1. RDGSBASE. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_rdgsbase, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xae 11b/2. WRFSBASE. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_wrfsbase, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xae 11b/3. WRGSBASE. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_wrgsbase, uint8_t, bRm);
5122
5123
/**
 * Opcode 0x0f 0xae - group 15 decoder.
 *
 * Memory forms (mod != 3) dispatch purely on the ModR/M reg field
 * (fxsave/fxrstor/ldmxcsr/stmxcsr/xsave/xrstor/xsaveopt/clflush).
 * Register forms dispatch on the prefix bytes first: no prefix selects the
 * fence instructions (reg 5..7), F3 selects the FSGSBASE group; any other
 * prefix combination is invalid opcode.
 */
FNIEMOP_DEF(iemOp_Grp15)
{
    IEMOP_HLP_MIN_586(); /* Not entirely accurate nor needed, but useful for debugging 286 code. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Memory operand forms. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_Grp15_fxsave, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_Grp15_fxrstor, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_Grp15_ldmxcsr, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_Grp15_stmxcsr, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_Grp15_xsave, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_Grp15_xrstor, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_Grp15_xsaveopt,bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_Grp15_clflush, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Register operand forms - prefix selects the instruction group. */
        switch (pIemCpu->fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_LOCK))
        {
            case 0: /* no prefix: fences in reg 5..7. */
                switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
                {
                    case 0: return IEMOP_RAISE_INVALID_OPCODE();
                    case 1: return IEMOP_RAISE_INVALID_OPCODE();
                    case 2: return IEMOP_RAISE_INVALID_OPCODE();
                    case 3: return IEMOP_RAISE_INVALID_OPCODE();
                    case 4: return IEMOP_RAISE_INVALID_OPCODE();
                    case 5: return FNIEMOP_CALL_1(iemOp_Grp15_lfence, bRm);
                    case 6: return FNIEMOP_CALL_1(iemOp_Grp15_mfence, bRm);
                    case 7: return FNIEMOP_CALL_1(iemOp_Grp15_sfence, bRm);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reached - every case above returns. */

            case IEM_OP_PRF_REPZ: /* F3 prefix: FSGSBASE group. */
                switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
                {
                    case 0: return FNIEMOP_CALL_1(iemOp_Grp15_rdfsbase, bRm);
                    case 1: return FNIEMOP_CALL_1(iemOp_Grp15_rdgsbase, bRm);
                    case 2: return FNIEMOP_CALL_1(iemOp_Grp15_wrfsbase, bRm);
                    case 3: return FNIEMOP_CALL_1(iemOp_Grp15_wrgsbase, bRm);
                    case 4: return IEMOP_RAISE_INVALID_OPCODE();
                    case 5: return IEMOP_RAISE_INVALID_OPCODE();
                    case 6: return IEMOP_RAISE_INVALID_OPCODE();
                    case 7: return IEMOP_RAISE_INVALID_OPCODE();
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reached - every case above returns. */

            default:
                return IEMOP_RAISE_INVALID_OPCODE();
        }
    }
}
5183
5184
/**
 * Opcode 0x0f 0xaf - IMUL Gv,Ev (two-operand form, 386+).
 *
 * SF/ZF/AF/PF are left undefined by the hardware, so the verification mode
 * is told not to compare them.
 */
FNIEMOP_DEF(iemOp_imul_Gv_Ev)
{
    IEMOP_MNEMONIC("imul Gv,Ev");
    IEMOP_HLP_MIN_386();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_imul_two);
}
5193
5194
/**
 * Opcode 0x0f 0xb0 - CMPXCHG Eb,Gb (486+).
 *
 * Compares AL with the destination; the assembly worker updates the
 * destination, the AL reference and EFLAGS as appropriate.  A LOCK prefix
 * selects the locked worker variant.
 */
FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
{
    IEMOP_MNEMONIC("cmpxchg Eb,Gb");
    IEMOP_HLP_MIN_486();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register destination: operate directly on register references. */
        IEMOP_HLP_DONE_DECODING();
        IEM_MC_BEGIN(4, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t *, pu8Al, 1);
        IEM_MC_ARG(uint8_t, u8Src, 2);
        IEM_MC_ARG(uint32_t *, pEFlags, 3);

        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* Memory destination: map it read-write, work on a local AL copy and
           store the (possibly updated) value back unconditionally. */
        IEM_MC_BEGIN(4, 3);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t *, pu8Al, 1);
        IEM_MC_ARG(uint8_t, u8Src, 2);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(uint8_t, u8Al);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING();
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_REF_LOCAL(pu8Al, u8Al);
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5253
/**
 * Opcode 0x0f 0xb1 - CMPXCHG Ev,Gv (486+).
 *
 * Word/dword/qword variant of CMPXCHG.  One IEM_MC block per effective
 * operand size; register and memory destinations are handled separately.
 * The 64-bit source is passed by reference on 32-bit x86 hosts
 * (RT_ARCH_X86) because the worker cannot take a 64-bit value argument.
 */
FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
{
    IEMOP_MNEMONIC("cmpxchg Ev,Gv");
    IEMOP_HLP_MIN_486();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register destination. */
        IEMOP_HLP_DONE_DECODING();
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Ax, 1);
                IEM_MC_ARG(uint16_t, u16Src, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Eax, 1);
                IEM_MC_ARG(uint32_t, u32Src, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);

                /* 32-bit register writes zero-extend to 64 bits; both EAX and
                   the destination register may have been written via references. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Rax, 1);
#ifdef RT_ARCH_X86
                IEM_MC_ARG(uint64_t *, pu64Src, 2);
#else
                IEM_MC_ARG(uint64_t, u64Src, 2);
#endif
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
#ifdef RT_ARCH_X86
                IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
#else
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
#endif

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory destination: map RW, operate on a local accumulator copy,
           commit the mapping and write the accumulator back. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Ax, 1);
                IEM_MC_ARG(uint16_t, u16Src, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint16_t, u16Ax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Eax, 1);
                IEM_MC_ARG(uint32_t, u32Src, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint32_t, u32Eax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Rax, 1);
#ifdef RT_ARCH_X86
                IEM_MC_ARG(uint64_t *, pu64Src, 2);
#else
                IEM_MC_ARG(uint64_t, u64Src, 2);
#endif
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint64_t, u64Rax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
#ifdef RT_ARCH_X86
                IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
#else
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
#endif

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
5447
5448
/**
 * Common worker for LDS/LES/LSS/LFS/LGS: loads a far pointer from memory into
 * a segment register + general register pair.
 *
 * The offset is fetched first, then the selector word at offset +2/+4/+8
 * depending on operand size.  In 64-bit mode AMD CPUs reportedly only load a
 * 32-bit offset (sign-extended here), Intel loads the full 64 bits.
 *
 * @param   iSegReg     The segment register to load (X86_SREG_XXX).
 * @param   bRm         The ModR/M byte (caller guarantees a memory form).
 */
FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
{
    Assert((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)); /* Caller checks this */
    uint8_t const iGReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg;

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t, uSel, 0);
            IEM_MC_ARG(uint16_t, offSeg, 1);
            IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
            IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16(offSeg, pIemCpu->iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pIemCpu->iEffSeg, GCPtrEff, 2);
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t, uSel, 0);
            IEM_MC_ARG(uint32_t, offSeg, 1);
            IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
            IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32(offSeg, pIemCpu->iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pIemCpu->iEffSeg, GCPtrEff, 4);
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t, uSel, 0);
            IEM_MC_ARG(uint64_t, offSeg, 1);
            IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
            IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            if (IEM_IS_GUEST_CPU_AMD(pIemCpu)) /** @todo testcase: rev 3.15 of the amd manuals claims it only loads a 32-bit greg. */
                IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pIemCpu->iEffSeg, GCPtrEff);
            else
                IEM_MC_FETCH_MEM_U64(offSeg, pIemCpu->iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pIemCpu->iEffSeg, GCPtrEff, 8);
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
5510
5511
/**
 * Opcode 0x0f 0xb2 - LSS Gv,Mp (386+).
 *
 * Register forms are invalid: the operand must be a far pointer in memory.
 */
FNIEMOP_DEF(iemOp_lss_Gv_Mp)
{
    IEMOP_MNEMONIC("lss Gv,Mp");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE();
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
}
5522
5523
5524/** Opcode 0x0f 0xb3. */
5525FNIEMOP_DEF(iemOp_btr_Ev_Gv)
5526{
5527 IEMOP_MNEMONIC("btr Ev,Gv");
5528 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
5529}
5530
5531
/**
 * Opcode 0x0f 0xb4 - LFS Gv,Mp (386+).
 *
 * Register forms are invalid: the operand must be a far pointer in memory.
 */
FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
{
    IEMOP_MNEMONIC("lfs Gv,Mp");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE();
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
}
5542
5543
/**
 * Opcode 0x0f 0xb5 - LGS Gv,Mp (386+).
 *
 * Register forms are invalid: the operand must be a far pointer in memory.
 */
FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
{
    IEMOP_MNEMONIC("lgs Gv,Mp");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE();
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
}
5554
5555
/**
 * Opcode 0x0f 0xb6 - MOVZX Gv,Eb (386+).
 *
 * Zero-extends a byte source (register or memory) into a 16/32/64-bit
 * general register, one IEM_MC block per effective operand size.
 */
FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
{
    IEMOP_MNEMONIC("movzx Gv,Eb");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
5646
5647
/**
 * Opcode 0x0f 0xb7 - MOVZX Gv,Ew (386+).
 *
 * Zero-extends a word source into a 32- or 64-bit register.  A 16-bit
 * effective operand size is treated the same as 32-bit here (the 32-bit
 * store zero-extends the destination).
 */
FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
{
    IEMOP_MNEMONIC("movzx Gv,Ew");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /** @todo Not entirely sure how the operand size prefix is handled here,
     *        assuming that it will be ignored. Would be nice to have a few
     *        test for this. */
    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}
5714
5715
/** Opcode 0x0f 0xb8. POPCNT (F3-prefixed) / JMPE - not implemented yet (stub). */
FNIEMOP_STUB(iemOp_popcnt_Gv_Ev_jmpe);
5718
5719
/**
 * Opcode 0x0f 0xb9 - group 10 (UD1).
 *
 * Reserved encoding; always raises invalid opcode.
 */
FNIEMOP_DEF(iemOp_Grp10)
{
    Log(("iemOp_Grp10 -> #UD\n"));
    return IEMOP_RAISE_INVALID_OPCODE();
}
5726
5727
/**
 * Opcode 0x0f 0xba - group 8: BT/BTS/BTR/BTC Ev,Ib (386+).
 *
 * The reg field selects the operation (0..3 invalid).  The immediate bit
 * index is masked to the operand width (0x0f/0x1f/0x3f), so unlike the
 * register-source forms no displacement beyond the operand can occur.
 */
FNIEMOP_DEF(iemOp_Grp8)
{
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPBINSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: case 1: case 2: case 3:
            return IEMOP_RAISE_INVALID_OPCODE();
        case 4: pImpl = &g_iemAImpl_bt; IEMOP_MNEMONIC("bt Ev,Ib"); break;
        case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC("bts Ev,Ib"); break;
        case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC("btr Ev,Ib"); break;
        case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC("btc Ev,Ib"); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register destination. */
        uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
        IEMOP_HLP_NO_LOCK_PREFIX();

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u8Bit & 0x0f, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u8Bit & 0x1f, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                /* 32-bit register writes zero-extend to 64 bits. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u8Bit & 0x3f, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory destination. */

        /* BT only reads; the writing ops (with a locked variant) map RW. */
        uint32_t fAccess;
        if (pImpl->pfnLockedU16)
            fAccess = IEM_ACCESS_DATA_RW;
        else /* BT */
        {
            IEMOP_HLP_NO_LOCK_PREFIX();
            fAccess = IEM_ACCESS_DATA_R;
        }

        /** @todo test negative bit offsets! */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* cbImm=1: the immediate byte still follows the ModR/M bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
                IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
                IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
                IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

}
5890
5891
/**
 * Opcode 0x0f 0xbb - BTC Ev,Gv (386+).
 *
 * Defers to the common bit-op worker with the BTC implementation table.
 */
FNIEMOP_DEF(iemOp_btc_Ev_Gv)
{
    IEMOP_MNEMONIC("btc Ev,Gv");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
}
5899
5900
/**
 * Opcode 0x0f 0xbc - BSF Gv,Ev (386+).
 *
 * Only ZF is architecturally defined; the other arithmetic flags are
 * excluded from verification-mode comparison.
 */
FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
{
    IEMOP_MNEMONIC("bsf Gv,Ev");
    IEMOP_HLP_MIN_386();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsf);
}
5909
5910
/**
 * Opcode 0x0f 0xbd - BSR Gv,Ev (386+).
 *
 * Only ZF is architecturally defined; the other arithmetic flags are
 * excluded from verification-mode comparison.
 */
FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
{
    IEMOP_MNEMONIC("bsr Gv,Ev");
    IEMOP_HLP_MIN_386();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsr);
}
5919
5920
/**
 * Opcode 0x0f 0xbe - MOVSX Gv,Eb (386+).
 *
 * Sign-extends a byte source (register or memory) into a 16/32/64-bit
 * general register, one IEM_MC block per effective operand size.
 */
FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
{
    IEMOP_MNEMONIC("movsx Gv,Eb");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6011
6012
/**
 * Opcode 0x0f 0xbf - movsx Gv,Ew.
 *
 * Sign extends a 16-bit register or memory operand into the destination
 * general purpose register.  The 16-bit and 32-bit effective operand sizes
 * both take the 32-bit store path below (see the @todo on prefix handling).
 */
FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
{
    IEMOP_MNEMONIC("movsx Gv,Ew");
    IEMOP_HLP_MIN_386(); /* Instruction introduced with the 386. */

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /** @todo Not entirely sure how the operand size prefix is handled here,
     *        assuming that it will be ignored. Would be nice to have a few
     *        tests for this. */
    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
        {
            /* 16-bit and 32-bit operand sizes: sign extend into a dword. */
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            /* 64-bit operand size: sign extend into a qword. */
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}
6079
6080
6081/** Opcode 0x0f 0xc0. */
6082FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
6083{
6084 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6085 IEMOP_HLP_MIN_486();
6086 IEMOP_MNEMONIC("xadd Eb,Gb");
6087
6088 /*
6089 * If rm is denoting a register, no more instruction bytes.
6090 */
6091 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6092 {
6093 IEMOP_HLP_NO_LOCK_PREFIX();
6094
6095 IEM_MC_BEGIN(3, 0);
6096 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6097 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
6098 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6099
6100 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
6101 IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
6102 IEM_MC_REF_EFLAGS(pEFlags);
6103 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
6104
6105 IEM_MC_ADVANCE_RIP();
6106 IEM_MC_END();
6107 }
6108 else
6109 {
6110 /*
6111 * We're accessing memory.
6112 */
6113 IEM_MC_BEGIN(3, 3);
6114 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6115 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
6116 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6117 IEM_MC_LOCAL(uint8_t, u8RegCopy);
6118 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6119
6120 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6121 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
6122 IEM_MC_FETCH_GREG_U8(u8RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
6123 IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
6124 IEM_MC_FETCH_EFLAGS(EFlags);
6125 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
6126 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
6127 else
6128 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);
6129
6130 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
6131 IEM_MC_COMMIT_EFLAGS(EFlags);
6132 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u8RegCopy);
6133 IEM_MC_ADVANCE_RIP();
6134 IEM_MC_END();
6135 return VINF_SUCCESS;
6136 }
6137 return VINF_SUCCESS;
6138}
6139
6140
/**
 * Opcode 0x0f 0xc1 - xadd Ev,Gv (486+).
 *
 * Exchanges the word/dword/qword destination (register or memory) with the
 * source register and stores the sum in the destination, selecting the
 * worker by effective operand size.  The memory forms honour the LOCK
 * prefix by dispatching to the locked assembly workers.
 */
FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
{
    IEMOP_MNEMONIC("xadd Ev,Gv");
    IEMOP_HLP_MIN_486();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX(); /* LOCK is only valid with a memory destination. */

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Reg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Reg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);

                /* 32-bit register writes zero the high dword of both operands. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Reg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory.  A local copy of the source register is
         * passed by reference so the worker can hand back the old
         * destination value, which is then stored to the register.
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Reg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16RegCopy);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U16(u16RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Reg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32RegCopy);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U32(u32RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Reg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64RegCopy);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U64(u64RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6293
/** Opcode 0x0f 0xc2 - cmpps/cmppd/cmpss/cmpsd (stub, not implemented). */
FNIEMOP_STUB(iemOp_cmpps_Vps_Wps_Ib__cmppd_Vpd_Wpd_Ib__cmpss_Vss_Wss_Ib__cmpsd_Vsd_Wsd_Ib);

/** Opcode 0x0f 0xc3 - movnti (stub, not implemented). */
FNIEMOP_STUB(iemOp_movnti_My_Gy);

/** Opcode 0x0f 0xc4 - pinsrw (stub, not implemented). */
FNIEMOP_STUB(iemOp_pinsrw_Pq_Ry_Mw_Ib__pinsrw_Vdq_Ry_Mw_Ib);

/** Opcode 0x0f 0xc5 - pextrw (stub, not implemented). */
FNIEMOP_STUB(iemOp_pextrw_Gd_Nq_Ib__pextrw_Gd_Udq_Ib);

/** Opcode 0x0f 0xc6 - shufps/shufpd (stub, not implemented). */
FNIEMOP_STUB(iemOp_shufps_Vps_Wps_Ib__shufdp_Vpd_Wpd_Ib);
6308
6309
/**
 * Opcode 0x0f 0xc7 !11/1 - cmpxchg8b Mq.
 *
 * Compares EDX:EAX with the qword at the effective address; on match
 * (ZF set by the worker) EBX:ECX is written to memory, otherwise the
 * memory value is loaded into EDX:EAX.  Honours the LOCK prefix via the
 * locked assembly worker.
 */
FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
{
    IEMOP_MNEMONIC("cmpxchg8b Mq");

    IEM_MC_BEGIN(4, 3);
    IEM_MC_ARG(uint64_t *, pu64MemDst, 0);
    IEM_MC_ARG(PRTUINT64U, pu64EaxEdx, 1);
    IEM_MC_ARG(PRTUINT64U, pu64EbxEcx, 2);
    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
    IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
    IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);

    /* Assemble the EDX:EAX comparand from the 32-bit register halves. */
    IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
    IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
    IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);

    /* Assemble the ECX:EBX replacement value the same way. */
    IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
    IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
    IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);

    IEM_MC_FETCH_EFLAGS(EFlags);
    if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
    else
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);

    IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
    IEM_MC_COMMIT_EFLAGS(EFlags);
    /* Only write EAX/EDX back when the compare failed (ZF clear). */
    IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
        /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */
        IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
        IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
6354
6355
/** Opcode REX.W 0x0f 0xc7 !11/1 - cmpxchg16b Mdq (UD stub: decodes, then raises \#UD). */
FNIEMOP_UD_STUB_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm);

/** Opcode 0x0f 0xc7 11/6 - rdrand Rv (UD stub). */
FNIEMOP_UD_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm);

/** Opcode 0x0f 0xc7 !11/6 - vmptrld Mq (UD stub). */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);

/** Opcode 0x66 0x0f 0xc7 !11/6 - vmclear Mq (UD stub). */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xc7 !11/6 - vmxon Mq (UD stub). */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);

/** Opcode [0xf3] 0x0f 0xc7 !11/7 - vmptrst Mq (UD stub). */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
6373
6374
6375/** Opcode 0x0f 0xc7. */
6376FNIEMOP_DEF(iemOp_Grp9)
6377{
6378 /** @todo Testcase: Check mixing 0x66 and 0xf3. Check the effect of 0xf2. */
6379 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6380 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
6381 {
6382 case 0: case 2: case 3: case 4: case 5:
6383 return IEMOP_RAISE_INVALID_OPCODE();
6384 case 1:
6385 /** @todo Testcase: Check prefix effects on cmpxchg8b/16b. */
6386 if ( (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)
6387 || (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))) /** @todo Testcase: AMD seems to express a different idea here wrt prefixes. */
6388 return IEMOP_RAISE_INVALID_OPCODE();
6389 if (bRm & IEM_OP_PRF_SIZE_REX_W)
6390 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
6391 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
6392 case 6:
6393 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6394 return FNIEMOP_CALL_1(iemOp_Grp9_rdrand_Rv, bRm);
6395 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))
6396 {
6397 case 0:
6398 return FNIEMOP_CALL_1(iemOp_Grp9_vmptrld_Mq, bRm);
6399 case IEM_OP_PRF_SIZE_OP:
6400 return FNIEMOP_CALL_1(iemOp_Grp9_vmclear_Mq, bRm);
6401 case IEM_OP_PRF_REPZ:
6402 return FNIEMOP_CALL_1(iemOp_Grp9_vmxon_Mq, bRm);
6403 default:
6404 return IEMOP_RAISE_INVALID_OPCODE();
6405 }
6406 case 7:
6407 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))
6408 {
6409 case 0:
6410 case IEM_OP_PRF_REPZ:
6411 return FNIEMOP_CALL_1(iemOp_Grp9_vmptrst_Mq, bRm);
6412 default:
6413 return IEMOP_RAISE_INVALID_OPCODE();
6414 }
6415 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6416 }
6417}
6418
6419
/**
 * Common 'bswap register' helper for opcodes 0x0f 0xc8 thru 0xcf.
 *
 * Byte-swaps the given general purpose register according to the current
 * effective operand size.
 *
 * @param   iReg    Index of the register to swap, including any REX.B
 *                  extension already folded in by the caller.
 */
FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            /* NOTE(review): a 32-bit reference is passed to the u16 worker
               here on purpose, without touching the high dword. */
            IEM_MC_BEGIN(1, 0);
            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
            IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            /* 32-bit writes clear the high dword of the 64-bit register. */
            IEM_MC_BEGIN(1, 0);
            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
            IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(1, 0);
            IEM_MC_ARG(uint64_t *, pu64Dst, 0);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6459
6460
/** Opcode 0x0f 0xc8 - bswap rAX/r8 (486+); defers to iemOpCommonBswapGReg. */
FNIEMOP_DEF(iemOp_bswap_rAX_r8)
{
    IEMOP_MNEMONIC("bswap rAX/r8");
    /* Note! Intel manuals states that R8-R15 can be accessed by using a REX.X
             prefix.  REX.B is the correct prefix it appears.  For a parallel
             case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pIemCpu->uRexB);
}
6471
6472
/** Opcode 0x0f 0xc9 - bswap rCX/r9 (486+); defers to iemOpCommonBswapGReg. */
FNIEMOP_DEF(iemOp_bswap_rCX_r9)
{
    IEMOP_MNEMONIC("bswap rCX/r9");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pIemCpu->uRexB);
}
6480
6481
6482/** Opcode 0x0f 0xca. */
6483FNIEMOP_DEF(iemOp_bswap_rDX_r10)
6484{
6485 IEMOP_MNEMONIC("bswap rDX/r9");
6486 IEMOP_HLP_MIN_486();
6487 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pIemCpu->uRexB);
6488}
6489
6490
6491/** Opcode 0x0f 0xcb. */
6492FNIEMOP_DEF(iemOp_bswap_rBX_r11)
6493{
6494 IEMOP_MNEMONIC("bswap rBX/r9");
6495 IEMOP_HLP_MIN_486();
6496 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pIemCpu->uRexB);
6497}
6498
6499
/** Opcode 0x0f 0xcc - bswap rSP/r12 (486+); defers to iemOpCommonBswapGReg. */
FNIEMOP_DEF(iemOp_bswap_rSP_r12)
{
    IEMOP_MNEMONIC("bswap rSP/r12");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pIemCpu->uRexB);
}


/** Opcode 0x0f 0xcd - bswap rBP/r13 (486+); defers to iemOpCommonBswapGReg. */
FNIEMOP_DEF(iemOp_bswap_rBP_r13)
{
    IEMOP_MNEMONIC("bswap rBP/r13");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pIemCpu->uRexB);
}


/** Opcode 0x0f 0xce - bswap rSI/r14 (486+); defers to iemOpCommonBswapGReg. */
FNIEMOP_DEF(iemOp_bswap_rSI_r14)
{
    IEMOP_MNEMONIC("bswap rSI/r14");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pIemCpu->uRexB);
}


/** Opcode 0x0f 0xcf - bswap rDI/r15 (486+); defers to iemOpCommonBswapGReg. */
FNIEMOP_DEF(iemOp_bswap_rDI_r15)
{
    IEMOP_MNEMONIC("bswap rDI/r15");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pIemCpu->uRexB);
}
6534
6535
6536
/** Opcode 0x0f 0xd0 - addsubpd/addsubps (stub, not implemented). */
FNIEMOP_STUB(iemOp_addsubpd_Vpd_Wpd__addsubps_Vps_Wps);
/** Opcode 0x0f 0xd1 - psrlw (stub, not implemented). */
FNIEMOP_STUB(iemOp_psrlw_Pp_Qp__psrlw_Vdp_Wdq);
/** Opcode 0x0f 0xd2 - psrld (stub, not implemented). */
FNIEMOP_STUB(iemOp_psrld_Pq_Qq__psrld_Vdq_Wdq);
/** Opcode 0x0f 0xd3 - psrlq (stub, not implemented). */
FNIEMOP_STUB(iemOp_psrlq_Pq_Qq__psrlq_Vdq_Wdq);
/** Opcode 0x0f 0xd4 - paddq (stub, not implemented). */
FNIEMOP_STUB(iemOp_paddq_Pq_Qq__paddq_Vdq_Wdq);
/** Opcode 0x0f 0xd5 - pmullw (stub, not implemented). */
FNIEMOP_STUB(iemOp_pmulq_Pq_Qq__pmullw_Vdq_Wdq);
/** Opcode 0x0f 0xd6 - movq/movq2dq/movdq2q (stub, not implemented). */
FNIEMOP_STUB(iemOp_movq_Wq_Vq__movq2dq_Vdq_Nq__movdq2q_Pq_Uq);
6551
6552
6553/** Opcode 0x0f 0xd7. */
6554FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq__pmovmskb_Gd_Udq)
6555{
6556 /* Docs says register only. */
6557 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6558 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
6559 return IEMOP_RAISE_INVALID_OPCODE();
6560
6561 /* Note! Taking the lazy approch here wrt the high 32-bits of the GREG. */
6562 /** @todo testcase: Check that the instruction implicitly clears the high
6563 * bits in 64-bit mode. The REX.W is first necessary when VLMAX > 256
6564 * and opcode modifications are made to work with the whole width (not
6565 * just 128). */
6566 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
6567 {
6568 case IEM_OP_PRF_SIZE_OP: /* SSE */
6569 IEMOP_MNEMONIC("pmovmskb Gd,Nq");
6570 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
6571 IEM_MC_BEGIN(2, 0);
6572 IEM_MC_ARG(uint64_t *, pDst, 0);
6573 IEM_MC_ARG(uint128_t const *, pSrc, 1);
6574 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6575 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
6576 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
6577 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
6578 IEM_MC_ADVANCE_RIP();
6579 IEM_MC_END();
6580 return VINF_SUCCESS;
6581
6582 case 0: /* MMX */
6583 IEMOP_MNEMONIC("pmovmskb Gd,Udq");
6584 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
6585 IEM_MC_BEGIN(2, 0);
6586 IEM_MC_ARG(uint64_t *, pDst, 0);
6587 IEM_MC_ARG(uint64_t const *, pSrc, 1);
6588 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
6589 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
6590 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
6591 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
6592 IEM_MC_ADVANCE_RIP();
6593 IEM_MC_END();
6594 return VINF_SUCCESS;
6595
6596 default:
6597 return IEMOP_RAISE_INVALID_OPCODE();
6598 }
6599}
6600
6601
/** Opcode 0x0f 0xd8 - psubusb (stub, not implemented). */
FNIEMOP_STUB(iemOp_psubusb_Pq_Qq__psubusb_Vdq_Wdq);
/** Opcode 0x0f 0xd9 - psubusw (stub, not implemented). */
FNIEMOP_STUB(iemOp_psubusw_Pq_Qq__psubusw_Vdq_Wdq);
/** Opcode 0x0f 0xda - pminub (stub, not implemented). */
FNIEMOP_STUB(iemOp_pminub_Pq_Qq__pminub_Vdq_Wdq);
/** Opcode 0x0f 0xdb - pand (stub, not implemented). */
FNIEMOP_STUB(iemOp_pand_Pq_Qq__pand_Vdq_Wdq);
/** Opcode 0x0f 0xdc - paddusb (stub, not implemented). */
FNIEMOP_STUB(iemOp_paddusb_Pq_Qq__paddusb_Vdq_Wdq);
/** Opcode 0x0f 0xdd - paddusw (stub, not implemented). */
FNIEMOP_STUB(iemOp_paddusw_Pq_Qq__paddusw_Vdq_Wdq);
/** Opcode 0x0f 0xde - pmaxub (stub, not implemented). */
FNIEMOP_STUB(iemOp_pmaxub_Pq_Qq__pamxub_Vdq_Wdq);
/** Opcode 0x0f 0xdf - pandn (stub, not implemented). */
FNIEMOP_STUB(iemOp_pandn_Pq_Qq__pandn_Vdq_Wdq);
/** Opcode 0x0f 0xe0 - pavgb (stub, not implemented). */
FNIEMOP_STUB(iemOp_pavgb_Pq_Qq__pavgb_Vdq_Wdq);
/** Opcode 0x0f 0xe1 - psraw (stub, not implemented). */
FNIEMOP_STUB(iemOp_psraw_Pq_Qq__psraw_Vdq_Wdq);
/** Opcode 0x0f 0xe2 - psrad (stub, not implemented). */
FNIEMOP_STUB(iemOp_psrad_Pq_Qq__psrad_Vdq_Wdq);
/** Opcode 0x0f 0xe3 - pavgw (stub, not implemented). */
FNIEMOP_STUB(iemOp_pavgw_Pq_Qq__pavgw_Vdq_Wdq);
/** Opcode 0x0f 0xe4 - pmulhuw (stub, not implemented). */
FNIEMOP_STUB(iemOp_pmulhuw_Pq_Qq__pmulhuw_Vdq_Wdq);
/** Opcode 0x0f 0xe5 - pmulhw (stub, not implemented). */
FNIEMOP_STUB(iemOp_pmulhw_Pq_Qq__pmulhw_Vdq_Wdq);
/** Opcode 0x0f 0xe6 - cvttpd2dq/cvtdq2pd/cvtpd2dq (stub, not implemented). */
FNIEMOP_STUB(iemOp_cvttpd2dq_Vdq_Wdp__cvtdq2pd_Vdq_Wpd__cvtpd2dq_Vdq_Wpd);
/** Opcode 0x0f 0xe7 - movntq/movntdq (stub, not implemented). */
FNIEMOP_STUB(iemOp_movntq_Mq_Pq__movntdq_Mdq_Vdq);
/** Opcode 0x0f 0xe8 - psubsb (stub, not implemented). */
FNIEMOP_STUB(iemOp_psubsb_Pq_Qq__psubsb_Vdq_Wdq);
/** Opcode 0x0f 0xe9 - psubsw (stub, not implemented). */
FNIEMOP_STUB(iemOp_psubsw_Pq_Qq__psubsw_Vdq_Wdq);
/** Opcode 0x0f 0xea - pminsw (stub, not implemented). */
FNIEMOP_STUB(iemOp_pminsw_Pq_Qq__pminsw_Vdq_Wdq);
/** Opcode 0x0f 0xeb - por (stub, not implemented). */
FNIEMOP_STUB(iemOp_por_Pq_Qq__por_Vdq_Wdq);
/** Opcode 0x0f 0xec - paddsb (stub, not implemented). */
FNIEMOP_STUB(iemOp_paddsb_Pq_Qq__paddsb_Vdq_Wdq);
/** Opcode 0x0f 0xed - paddsw (stub, not implemented). */
FNIEMOP_STUB(iemOp_paddsw_Pq_Qq__paddsw_Vdq_Wdq);
/** Opcode 0x0f 0xee - pmaxsw (stub, not implemented). */
FNIEMOP_STUB(iemOp_pmaxsw_Pq_Qq__pmaxsw_Vdq_Wdq);
6648
6649
/** Opcode 0x0f 0xef - pxor; defers to the common MMX/SSE2 full-width
 *  binary-operation worker with the pxor implementation table. */
FNIEMOP_DEF(iemOp_pxor_Pq_Qq__pxor_Vdq_Wdq)
{
    IEMOP_MNEMONIC("pxor");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pxor);
}
6656
6657
/** Opcode 0x0f 0xf0 - lddqu (stub, not implemented). */
FNIEMOP_STUB(iemOp_lddqu_Vdq_Mdq);
/** Opcode 0x0f 0xf1 - psllw (stub, not implemented). */
FNIEMOP_STUB(iemOp_psllw_Pq_Qq__pslw_Vdq_Wdq);
/** Opcode 0x0f 0xf2 - pslld (stub, not implemented). */
FNIEMOP_STUB(iemOp_psld_Pq_Qq__pslld_Vdq_Wdq);
/** Opcode 0x0f 0xf3 - psllq (stub, not implemented). */
FNIEMOP_STUB(iemOp_psllq_Pq_Qq__pslq_Vdq_Wdq);
/** Opcode 0x0f 0xf4 - pmuludq (stub, not implemented). */
FNIEMOP_STUB(iemOp_pmuludq_Pq_Qq__pmuludq_Vdq_Wdq);
/** Opcode 0x0f 0xf5 - pmaddwd (stub, not implemented). */
FNIEMOP_STUB(iemOp_pmaddwd_Pq_Qq__pmaddwd_Vdq_Wdq);
/** Opcode 0x0f 0xf6 - psadbw (stub, not implemented). */
FNIEMOP_STUB(iemOp_psadbw_Pq_Qq__psadbw_Vdq_Wdq);
/** Opcode 0x0f 0xf7 - maskmovq/maskmovdqu (stub, not implemented). */
FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq__maskmovdqu_Vdq_Udq);
/** Opcode 0x0f 0xf8 - psubb (stub, not implemented). */
FNIEMOP_STUB(iemOp_psubb_Pq_Qq_psubb_Vdq_Wdq); //NEXT
/** Opcode 0x0f 0xf9 - psubw (stub, not implemented). */
FNIEMOP_STUB(iemOp_psubw_Pq_Qq__psubw_Vdq_Wdq);
/** Opcode 0x0f 0xfa - psubd (stub, not implemented). */
FNIEMOP_STUB(iemOp_psubd_Pq_Qq__psubd_Vdq_Wdq);
/** Opcode 0x0f 0xfb - psubq (stub, not implemented). */
FNIEMOP_STUB(iemOp_psubq_Pq_Qq__psbuq_Vdq_Wdq);
/** Opcode 0x0f 0xfc - paddb (stub, not implemented). */
FNIEMOP_STUB(iemOp_paddb_Pq_Qq__paddb_Vdq_Wdq);
/** Opcode 0x0f 0xfd - paddw (stub, not implemented). */
FNIEMOP_STUB(iemOp_paddw_Pq_Qq__paddw_Vdq_Wdq);
/** Opcode 0x0f 0xfe - paddd (stub, not implemented). */
FNIEMOP_STUB(iemOp_paddd_Pq_Qq__paddd_Vdq_Wdq);
6688
6689
6690const PFNIEMOP g_apfnTwoByteMap[256] =
6691{
6692 /* 0x00 */ iemOp_Grp6,
6693 /* 0x01 */ iemOp_Grp7,
6694 /* 0x02 */ iemOp_lar_Gv_Ew,
6695 /* 0x03 */ iemOp_lsl_Gv_Ew,
6696 /* 0x04 */ iemOp_Invalid,
6697 /* 0x05 */ iemOp_syscall,
6698 /* 0x06 */ iemOp_clts,
6699 /* 0x07 */ iemOp_sysret,
6700 /* 0x08 */ iemOp_invd,
6701 /* 0x09 */ iemOp_wbinvd,
6702 /* 0x0a */ iemOp_Invalid,
6703 /* 0x0b */ iemOp_ud2,
6704 /* 0x0c */ iemOp_Invalid,
6705 /* 0x0d */ iemOp_nop_Ev_GrpP,
6706 /* 0x0e */ iemOp_femms,
6707 /* 0x0f */ iemOp_3Dnow,
6708 /* 0x10 */ iemOp_movups_Vps_Wps__movupd_Vpd_Wpd__movss_Vss_Wss__movsd_Vsd_Wsd,
6709 /* 0x11 */ iemOp_movups_Wps_Vps__movupd_Wpd_Vpd__movss_Wss_Vss__movsd_Vsd_Wsd,
6710 /* 0x12 */ iemOp_movlps_Vq_Mq__movhlps_Vq_Uq__movlpd_Vq_Mq__movsldup_Vq_Wq__movddup_Vq_Wq,
6711 /* 0x13 */ iemOp_movlps_Mq_Vq__movlpd_Mq_Vq,
6712 /* 0x14 */ iemOp_unpckhlps_Vps_Wq__unpcklpd_Vpd_Wq,
6713 /* 0x15 */ iemOp_unpckhps_Vps_Wq__unpckhpd_Vpd_Wq,
6714 /* 0x16 */ iemOp_movhps_Vq_Mq__movlhps_Vq_Uq__movhpd_Vq_Mq__movshdup_Vq_Wq,
6715 /* 0x17 */ iemOp_movhps_Mq_Vq__movhpd_Mq_Vq,
6716 /* 0x18 */ iemOp_prefetch_Grp16,
6717 /* 0x19 */ iemOp_nop_Ev,
6718 /* 0x1a */ iemOp_nop_Ev,
6719 /* 0x1b */ iemOp_nop_Ev,
6720 /* 0x1c */ iemOp_nop_Ev,
6721 /* 0x1d */ iemOp_nop_Ev,
6722 /* 0x1e */ iemOp_nop_Ev,
6723 /* 0x1f */ iemOp_nop_Ev,
6724 /* 0x20 */ iemOp_mov_Rd_Cd,
6725 /* 0x21 */ iemOp_mov_Rd_Dd,
6726 /* 0x22 */ iemOp_mov_Cd_Rd,
6727 /* 0x23 */ iemOp_mov_Dd_Rd,
6728 /* 0x24 */ iemOp_mov_Rd_Td,
6729 /* 0x25 */ iemOp_Invalid,
6730 /* 0x26 */ iemOp_mov_Td_Rd,
6731 /* 0x27 */ iemOp_Invalid,
6732 /* 0x28 */ iemOp_movaps_Vps_Wps__movapd_Vpd_Wpd,
6733 /* 0x29 */ iemOp_movaps_Wps_Vps__movapd_Wpd_Vpd,
6734 /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi__cvtpi2pd_Vpd_Qpi__cvtsi2ss_Vss_Ey__cvtsi2sd_Vsd_Ey,
6735 /* 0x2b */ iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd,
6736 /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps__cvttpd2pi_Ppi_Wpd__cvttss2si_Gy_Wss__cvttsd2si_Yu_Wsd,
6737 /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps__cvtpd2pi_QpiWpd__cvtss2si_Gy_Wss__cvtsd2si_Gy_Wsd,
6738 /* 0x2e */ iemOp_ucomiss_Vss_Wss__ucomisd_Vsd_Wsd,
6739 /* 0x2f */ iemOp_comiss_Vss_Wss__comisd_Vsd_Wsd,
6740 /* 0x30 */ iemOp_wrmsr,
6741 /* 0x31 */ iemOp_rdtsc,
6742 /* 0x32 */ iemOp_rdmsr,
6743 /* 0x33 */ iemOp_rdpmc,
6744 /* 0x34 */ iemOp_sysenter,
6745 /* 0x35 */ iemOp_sysexit,
6746 /* 0x36 */ iemOp_Invalid,
6747 /* 0x37 */ iemOp_getsec,
6748 /* 0x38 */ iemOp_3byte_Esc_A4,
6749 /* 0x39 */ iemOp_Invalid,
6750 /* 0x3a */ iemOp_3byte_Esc_A5,
6751 /* 0x3b */ iemOp_Invalid,
6752 /* 0x3c */ iemOp_movnti_Gv_Ev/*??*/,
6753 /* 0x3d */ iemOp_Invalid,
6754 /* 0x3e */ iemOp_Invalid,
6755 /* 0x3f */ iemOp_Invalid,
6756 /* 0x40 */ iemOp_cmovo_Gv_Ev,
6757 /* 0x41 */ iemOp_cmovno_Gv_Ev,
6758 /* 0x42 */ iemOp_cmovc_Gv_Ev,
6759 /* 0x43 */ iemOp_cmovnc_Gv_Ev,
6760 /* 0x44 */ iemOp_cmove_Gv_Ev,
6761 /* 0x45 */ iemOp_cmovne_Gv_Ev,
6762 /* 0x46 */ iemOp_cmovbe_Gv_Ev,
6763 /* 0x47 */ iemOp_cmovnbe_Gv_Ev,
6764 /* 0x48 */ iemOp_cmovs_Gv_Ev,
6765 /* 0x49 */ iemOp_cmovns_Gv_Ev,
6766 /* 0x4a */ iemOp_cmovp_Gv_Ev,
6767 /* 0x4b */ iemOp_cmovnp_Gv_Ev,
6768 /* 0x4c */ iemOp_cmovl_Gv_Ev,
6769 /* 0x4d */ iemOp_cmovnl_Gv_Ev,
6770 /* 0x4e */ iemOp_cmovle_Gv_Ev,
6771 /* 0x4f */ iemOp_cmovnle_Gv_Ev,
6772 /* 0x50 */ iemOp_movmskps_Gy_Ups__movmskpd_Gy_Upd,
6773 /* 0x51 */ iemOp_sqrtps_Wps_Vps__sqrtpd_Wpd_Vpd__sqrtss_Vss_Wss__sqrtsd_Vsd_Wsd,
6774 /* 0x52 */ iemOp_rsqrtps_Wps_Vps__rsqrtss_Vss_Wss,
6775 /* 0x53 */ iemOp_rcpps_Wps_Vps__rcpss_Vs_Wss,
6776 /* 0x54 */ iemOp_andps_Vps_Wps__andpd_Wpd_Vpd,
6777 /* 0x55 */ iemOp_andnps_Vps_Wps__andnpd_Wpd_Vpd,
6778 /* 0x56 */ iemOp_orps_Wpd_Vpd__orpd_Wpd_Vpd,
6779 /* 0x57 */ iemOp_xorps_Vps_Wps__xorpd_Wpd_Vpd,
6780 /* 0x58 */ iemOp_addps_Vps_Wps__addpd_Vpd_Wpd__addss_Vss_Wss__addsd_Vsd_Wsd,
6781 /* 0x59 */ iemOp_mulps_Vps_Wps__mulpd_Vpd_Wpd__mulss_Vss__Wss__mulsd_Vsd_Wsd,
6782 /* 0x5a */ iemOp_cvtps2pd_Vpd_Wps__cvtpd2ps_Vps_Wpd__cvtss2sd_Vsd_Wss__cvtsd2ss_Vss_Wsd,
6783 /* 0x5b */ iemOp_cvtdq2ps_Vps_Wdq__cvtps2dq_Vdq_Wps__cvtps2dq_Vdq_Wps,
6784 /* 0x5c */ iemOp_subps_Vps_Wps__subpd_Vps_Wdp__subss_Vss_Wss__subsd_Vsd_Wsd,
6785 /* 0x5d */ iemOp_minps_Vps_Wps__minpd_Vpd_Wpd__minss_Vss_Wss__minsd_Vsd_Wsd,
6786 /* 0x5e */ iemOp_divps_Vps_Wps__divpd_Vpd_Wpd__divss_Vss_Wss__divsd_Vsd_Wsd,
6787 /* 0x5f */ iemOp_maxps_Vps_Wps__maxpd_Vpd_Wpd__maxss_Vss_Wss__maxsd_Vsd_Wsd,
6788 /* 0x60 */ iemOp_punpcklbw_Pq_Qd__punpcklbw_Vdq_Wdq,
6789 /* 0x61 */ iemOp_punpcklwd_Pq_Qd__punpcklwd_Vdq_Wdq,
6790 /* 0x62 */ iemOp_punpckldq_Pq_Qd__punpckldq_Vdq_Wdq,
6791 /* 0x63 */ iemOp_packsswb_Pq_Qq__packsswb_Vdq_Wdq,
6792 /* 0x64 */ iemOp_pcmpgtb_Pq_Qq__pcmpgtb_Vdq_Wdq,
6793 /* 0x65 */ iemOp_pcmpgtw_Pq_Qq__pcmpgtw_Vdq_Wdq,
6794 /* 0x66 */ iemOp_pcmpgtd_Pq_Qq__pcmpgtd_Vdq_Wdq,
6795 /* 0x67 */ iemOp_packuswb_Pq_Qq__packuswb_Vdq_Wdq,
6796 /* 0x68 */ iemOp_punpckhbw_Pq_Qq__punpckhbw_Vdq_Wdq,
6797 /* 0x69 */ iemOp_punpckhwd_Pq_Qd__punpckhwd_Vdq_Wdq,
6798 /* 0x6a */ iemOp_punpckhdq_Pq_Qd__punpckhdq_Vdq_Wdq,
6799 /* 0x6b */ iemOp_packssdw_Pq_Qd__packssdq_Vdq_Wdq,
6800 /* 0x6c */ iemOp_punpcklqdq_Vdq_Wdq,
6801 /* 0x6d */ iemOp_punpckhqdq_Vdq_Wdq,
6802 /* 0x6e */ iemOp_movd_q_Pd_Ey__movd_q_Vy_Ey,
6803 /* 0x6f */ iemOp_movq_Pq_Qq__movdqa_Vdq_Wdq__movdqu_Vdq_Wdq,
6804 /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib__pshufd_Vdq_Wdq_Ib__pshufhw_Vdq_Wdq_Ib__pshuflq_Vdq_Wdq_Ib,
6805 /* 0x71 */ iemOp_Grp12,
6806 /* 0x72 */ iemOp_Grp13,
6807 /* 0x73 */ iemOp_Grp14,
6808 /* 0x74 */ iemOp_pcmpeqb_Pq_Qq__pcmpeqb_Vdq_Wdq,
6809 /* 0x75 */ iemOp_pcmpeqw_Pq_Qq__pcmpeqw_Vdq_Wdq,
6810 /* 0x76 */ iemOp_pcmped_Pq_Qq__pcmpeqd_Vdq_Wdq,
6811 /* 0x77 */ iemOp_emms,
6812 /* 0x78 */ iemOp_vmread_AmdGrp17,
6813 /* 0x79 */ iemOp_vmwrite,
6814 /* 0x7a */ iemOp_Invalid,
6815 /* 0x7b */ iemOp_Invalid,
6816 /* 0x7c */ iemOp_haddpd_Vdp_Wpd__haddps_Vps_Wps,
6817 /* 0x7d */ iemOp_hsubpd_Vpd_Wpd__hsubps_Vps_Wps,
6818 /* 0x7e */ iemOp_movd_q_Ey_Pd__movd_q_Ey_Vy__movq_Vq_Wq,
6819 /* 0x7f */ iemOp_movq_Qq_Pq__movq_movdqa_Wdq_Vdq__movdqu_Wdq_Vdq,
6820 /* 0x80 */ iemOp_jo_Jv,
6821 /* 0x81 */ iemOp_jno_Jv,
6822 /* 0x82 */ iemOp_jc_Jv,
6823 /* 0x83 */ iemOp_jnc_Jv,
6824 /* 0x84 */ iemOp_je_Jv,
6825 /* 0x85 */ iemOp_jne_Jv,
6826 /* 0x86 */ iemOp_jbe_Jv,
6827 /* 0x87 */ iemOp_jnbe_Jv,
6828 /* 0x88 */ iemOp_js_Jv,
6829 /* 0x89 */ iemOp_jns_Jv,
6830 /* 0x8a */ iemOp_jp_Jv,
6831 /* 0x8b */ iemOp_jnp_Jv,
6832 /* 0x8c */ iemOp_jl_Jv,
6833 /* 0x8d */ iemOp_jnl_Jv,
6834 /* 0x8e */ iemOp_jle_Jv,
6835 /* 0x8f */ iemOp_jnle_Jv,
6836 /* 0x90 */ iemOp_seto_Eb,
6837 /* 0x91 */ iemOp_setno_Eb,
6838 /* 0x92 */ iemOp_setc_Eb,
6839 /* 0x93 */ iemOp_setnc_Eb,
6840 /* 0x94 */ iemOp_sete_Eb,
6841 /* 0x95 */ iemOp_setne_Eb,
6842 /* 0x96 */ iemOp_setbe_Eb,
6843 /* 0x97 */ iemOp_setnbe_Eb,
6844 /* 0x98 */ iemOp_sets_Eb,
6845 /* 0x99 */ iemOp_setns_Eb,
6846 /* 0x9a */ iemOp_setp_Eb,
6847 /* 0x9b */ iemOp_setnp_Eb,
6848 /* 0x9c */ iemOp_setl_Eb,
6849 /* 0x9d */ iemOp_setnl_Eb,
6850 /* 0x9e */ iemOp_setle_Eb,
6851 /* 0x9f */ iemOp_setnle_Eb,
6852 /* 0xa0 */ iemOp_push_fs,
6853 /* 0xa1 */ iemOp_pop_fs,
6854 /* 0xa2 */ iemOp_cpuid,
6855 /* 0xa3 */ iemOp_bt_Ev_Gv,
6856 /* 0xa4 */ iemOp_shld_Ev_Gv_Ib,
6857 /* 0xa5 */ iemOp_shld_Ev_Gv_CL,
6858 /* 0xa6 */ iemOp_Invalid,
6859 /* 0xa7 */ iemOp_Invalid,
6860 /* 0xa8 */ iemOp_push_gs,
6861 /* 0xa9 */ iemOp_pop_gs,
6862 /* 0xaa */ iemOp_rsm,
6863 /* 0xab */ iemOp_bts_Ev_Gv,
6864 /* 0xac */ iemOp_shrd_Ev_Gv_Ib,
6865 /* 0xad */ iemOp_shrd_Ev_Gv_CL,
6866 /* 0xae */ iemOp_Grp15,
6867 /* 0xaf */ iemOp_imul_Gv_Ev,
6868 /* 0xb0 */ iemOp_cmpxchg_Eb_Gb,
6869 /* 0xb1 */ iemOp_cmpxchg_Ev_Gv,
6870 /* 0xb2 */ iemOp_lss_Gv_Mp,
6871 /* 0xb3 */ iemOp_btr_Ev_Gv,
6872 /* 0xb4 */ iemOp_lfs_Gv_Mp,
6873 /* 0xb5 */ iemOp_lgs_Gv_Mp,
6874 /* 0xb6 */ iemOp_movzx_Gv_Eb,
6875 /* 0xb7 */ iemOp_movzx_Gv_Ew,
6876 /* 0xb8 */ iemOp_popcnt_Gv_Ev_jmpe,
6877 /* 0xb9 */ iemOp_Grp10,
6878 /* 0xba */ iemOp_Grp8,
6879 /* 0xbd */ iemOp_btc_Ev_Gv,
6880 /* 0xbc */ iemOp_bsf_Gv_Ev,
6881 /* 0xbd */ iemOp_bsr_Gv_Ev,
6882 /* 0xbe */ iemOp_movsx_Gv_Eb,
6883 /* 0xbf */ iemOp_movsx_Gv_Ew,
6884 /* 0xc0 */ iemOp_xadd_Eb_Gb,
6885 /* 0xc1 */ iemOp_xadd_Ev_Gv,
6886 /* 0xc2 */ iemOp_cmpps_Vps_Wps_Ib__cmppd_Vpd_Wpd_Ib__cmpss_Vss_Wss_Ib__cmpsd_Vsd_Wsd_Ib,
6887 /* 0xc3 */ iemOp_movnti_My_Gy,
6888 /* 0xc4 */ iemOp_pinsrw_Pq_Ry_Mw_Ib__pinsrw_Vdq_Ry_Mw_Ib,
6889 /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib__pextrw_Gd_Udq_Ib,
6890 /* 0xc6 */ iemOp_shufps_Vps_Wps_Ib__shufdp_Vpd_Wpd_Ib,
6891 /* 0xc7 */ iemOp_Grp9,
6892 /* 0xc8 */ iemOp_bswap_rAX_r8,
6893 /* 0xc9 */ iemOp_bswap_rCX_r9,
6894 /* 0xca */ iemOp_bswap_rDX_r10,
6895 /* 0xcb */ iemOp_bswap_rBX_r11,
6896 /* 0xcc */ iemOp_bswap_rSP_r12,
6897 /* 0xcd */ iemOp_bswap_rBP_r13,
6898 /* 0xce */ iemOp_bswap_rSI_r14,
6899 /* 0xcf */ iemOp_bswap_rDI_r15,
6900 /* 0xd0 */ iemOp_addsubpd_Vpd_Wpd__addsubps_Vps_Wps,
6901 /* 0xd1 */ iemOp_psrlw_Pp_Qp__psrlw_Vdp_Wdq,
6902 /* 0xd2 */ iemOp_psrld_Pq_Qq__psrld_Vdq_Wdq,
6903 /* 0xd3 */ iemOp_psrlq_Pq_Qq__psrlq_Vdq_Wdq,
6904 /* 0xd4 */ iemOp_paddq_Pq_Qq__paddq_Vdq_Wdq,
6905 /* 0xd5 */ iemOp_pmulq_Pq_Qq__pmullw_Vdq_Wdq,
6906 /* 0xd6 */ iemOp_movq_Wq_Vq__movq2dq_Vdq_Nq__movdq2q_Pq_Uq,
6907 /* 0xd7 */ iemOp_pmovmskb_Gd_Nq__pmovmskb_Gd_Udq,
6908 /* 0xd8 */ iemOp_psubusb_Pq_Qq__psubusb_Vdq_Wdq,
6909 /* 0xd9 */ iemOp_psubusw_Pq_Qq__psubusw_Vdq_Wdq,
6910 /* 0xda */ iemOp_pminub_Pq_Qq__pminub_Vdq_Wdq,
6911 /* 0xdb */ iemOp_pand_Pq_Qq__pand_Vdq_Wdq,
6912 /* 0xdc */ iemOp_paddusb_Pq_Qq__paddusb_Vdq_Wdq,
6913 /* 0xdd */ iemOp_paddusw_Pq_Qq__paddusw_Vdq_Wdq,
6914 /* 0xde */ iemOp_pmaxub_Pq_Qq__pamxub_Vdq_Wdq,
6915 /* 0xdf */ iemOp_pandn_Pq_Qq__pandn_Vdq_Wdq,
6916 /* 0xe0 */ iemOp_pavgb_Pq_Qq__pavgb_Vdq_Wdq,
6917 /* 0xe1 */ iemOp_psraw_Pq_Qq__psraw_Vdq_Wdq,
6918 /* 0xe2 */ iemOp_psrad_Pq_Qq__psrad_Vdq_Wdq,
6919 /* 0xe3 */ iemOp_pavgw_Pq_Qq__pavgw_Vdq_Wdq,
6920 /* 0xe4 */ iemOp_pmulhuw_Pq_Qq__pmulhuw_Vdq_Wdq,
6921 /* 0xe5 */ iemOp_pmulhw_Pq_Qq__pmulhw_Vdq_Wdq,
6922 /* 0xe6 */ iemOp_cvttpd2dq_Vdq_Wdp__cvtdq2pd_Vdq_Wpd__cvtpd2dq_Vdq_Wpd,
6923 /* 0xe7 */ iemOp_movntq_Mq_Pq__movntdq_Mdq_Vdq,
6924 /* 0xe8 */ iemOp_psubsb_Pq_Qq__psubsb_Vdq_Wdq,
6925 /* 0xe9 */ iemOp_psubsw_Pq_Qq__psubsw_Vdq_Wdq,
6926 /* 0xea */ iemOp_pminsw_Pq_Qq__pminsw_Vdq_Wdq,
6927 /* 0xeb */ iemOp_por_Pq_Qq__por_Vdq_Wdq,
6928 /* 0xec */ iemOp_paddsb_Pq_Qq__paddsb_Vdq_Wdq,
6929 /* 0xed */ iemOp_paddsw_Pq_Qq__paddsw_Vdq_Wdq,
6930 /* 0xee */ iemOp_pmaxsw_Pq_Qq__pmaxsw_Vdq_Wdq,
6931 /* 0xef */ iemOp_pxor_Pq_Qq__pxor_Vdq_Wdq,
6932 /* 0xf0 */ iemOp_lddqu_Vdq_Mdq,
6933 /* 0xf1 */ iemOp_psllw_Pq_Qq__pslw_Vdq_Wdq,
6934 /* 0xf2 */ iemOp_psld_Pq_Qq__pslld_Vdq_Wdq,
6935 /* 0xf3 */ iemOp_psllq_Pq_Qq__pslq_Vdq_Wdq,
6936 /* 0xf4 */ iemOp_pmuludq_Pq_Qq__pmuludq_Vdq_Wdq,
6937 /* 0xf5 */ iemOp_pmaddwd_Pq_Qq__pmaddwd_Vdq_Wdq,
6938 /* 0xf6 */ iemOp_psadbw_Pq_Qq__psadbw_Vdq_Wdq,
6939 /* 0xf7 */ iemOp_maskmovq_Pq_Nq__maskmovdqu_Vdq_Udq,
6940 /* 0xf8 */ iemOp_psubb_Pq_Qq_psubb_Vdq_Wdq,
6941 /* 0xf9 */ iemOp_psubw_Pq_Qq__psubw_Vdq_Wdq,
6942 /* 0xfa */ iemOp_psubd_Pq_Qq__psubd_Vdq_Wdq,
6943 /* 0xfb */ iemOp_psubq_Pq_Qq__psbuq_Vdq_Wdq,
6944 /* 0xfc */ iemOp_paddb_Pq_Qq__paddb_Vdq_Wdq,
6945 /* 0xfd */ iemOp_paddw_Pq_Qq__paddw_Vdq_Wdq,
6946 /* 0xfe */ iemOp_paddd_Pq_Qq__paddd_Vdq_Wdq,
6947 /* 0xff */ iemOp_Invalid
6948};
6949
6950/** @} */
6951
6952
6953/** @name One byte opcodes.
6954 *
6955 * @{
6956 */
6957
/** Opcode 0x00 - ADD Eb,Gb (byte r/m destination); dispatches to the common
 *  rm,r8 binary-operator decoder with the ADD worker table. */
FNIEMOP_DEF(iemOp_add_Eb_Gb)
{
    IEMOP_MNEMONIC("add Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_add);
}
6964
6965
/** Opcode 0x01 - ADD Ev,Gv (word/dword/qword r/m destination). */
FNIEMOP_DEF(iemOp_add_Ev_Gv)
{
    IEMOP_MNEMONIC("add Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_add);
}
6972
6973
/** Opcode 0x02 - ADD Gb,Eb (byte register destination). */
FNIEMOP_DEF(iemOp_add_Gb_Eb)
{
    IEMOP_MNEMONIC("add Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_add);
}
6980
6981
/** Opcode 0x03 - ADD Gv,Ev (word/dword/qword register destination). */
FNIEMOP_DEF(iemOp_add_Gv_Ev)
{
    IEMOP_MNEMONIC("add Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_add);
}
6988
6989
/** Opcode 0x04 - ADD AL,Ib (immediate byte into AL). */
FNIEMOP_DEF(iemOp_add_Al_Ib)
{
    IEMOP_MNEMONIC("add al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_add);
}
6996
6997
/** Opcode 0x05 - ADD rAX,Iz (operand-size immediate into AX/EAX/RAX). */
FNIEMOP_DEF(iemOp_add_eAX_Iz)
{
    IEMOP_MNEMONIC("add rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_add);
}
7004
7005
/** Opcode 0x06 - PUSH ES; uses the common segment-register push helper. */
FNIEMOP_DEF(iemOp_push_ES)
{
    IEMOP_MNEMONIC("push es");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_ES);
}
7012
7013
/** Opcode 0x07 - POP ES.
 *  Not available in 64-bit mode (IEMOP_HLP_NO_64BIT); no lock prefix allowed.
 *  Deferred to the C implementation since a segment load can fault. */
FNIEMOP_DEF(iemOp_pop_ES)
{
    IEMOP_MNEMONIC("pop es");
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_ES, pIemCpu->enmEffOpSize);
}
7022
7023
/** Opcode 0x08 - OR Eb,Gb.  AF is architecturally undefined after OR. */
FNIEMOP_DEF(iemOp_or_Eb_Gb)
{
    IEMOP_MNEMONIC("or Eb,Gb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_or);
}
7031
7032
7033/** Opcode 0x09. */
7034FNIEMOP_DEF(iemOp_or_Ev_Gv)
7035{
7036 IEMOP_MNEMONIC("or Ev,Gv ");
7037 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7038 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_or);
7039}
7040
7041
/** Opcode 0x0a - OR Gb,Eb.  AF is architecturally undefined after OR. */
FNIEMOP_DEF(iemOp_or_Gb_Eb)
{
    IEMOP_MNEMONIC("or Gb,Eb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_or);
}
7049
7050
/** Opcode 0x0b - OR Gv,Ev.  AF is architecturally undefined after OR. */
FNIEMOP_DEF(iemOp_or_Gv_Ev)
{
    IEMOP_MNEMONIC("or Gv,Ev");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_or);
}
7058
7059
/** Opcode 0x0c - OR AL,Ib.  AF is architecturally undefined after OR. */
FNIEMOP_DEF(iemOp_or_Al_Ib)
{
    IEMOP_MNEMONIC("or al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_or);
}
7067
7068
/** Opcode 0x0d - OR rAX,Iz.  AF is architecturally undefined after OR. */
FNIEMOP_DEF(iemOp_or_eAX_Iz)
{
    IEMOP_MNEMONIC("or rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_or);
}
7076
7077
/** Opcode 0x0e - PUSH CS. */
FNIEMOP_DEF(iemOp_push_CS)
{
    IEMOP_MNEMONIC("push cs");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_CS);
}
7084
7085
/** Opcode 0x0f - escape to the two-byte opcode map; fetches the next opcode
 *  byte and dispatches through g_apfnTwoByteMap.  Requires a 286 or later. */
FNIEMOP_DEF(iemOp_2byteEscape)
{
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    /** @todo PUSH CS on 8086, undefined on 80186. */
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL(g_apfnTwoByteMap[b]);
}
7094
/** Opcode 0x10 - ADC Eb,Gb (add with carry, byte r/m destination). */
FNIEMOP_DEF(iemOp_adc_Eb_Gb)
{
    IEMOP_MNEMONIC("adc Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_adc);
}
7101
7102
/** Opcode 0x11 - ADC Ev,Gv. */
FNIEMOP_DEF(iemOp_adc_Ev_Gv)
{
    IEMOP_MNEMONIC("adc Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_adc);
}
7109
7110
/** Opcode 0x12 - ADC Gb,Eb. */
FNIEMOP_DEF(iemOp_adc_Gb_Eb)
{
    IEMOP_MNEMONIC("adc Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_adc);
}
7117
7118
/** Opcode 0x13 - ADC Gv,Ev. */
FNIEMOP_DEF(iemOp_adc_Gv_Ev)
{
    IEMOP_MNEMONIC("adc Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_adc);
}
7125
7126
/** Opcode 0x14 - ADC AL,Ib. */
FNIEMOP_DEF(iemOp_adc_Al_Ib)
{
    IEMOP_MNEMONIC("adc al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_adc);
}
7133
7134
/** Opcode 0x15 - ADC rAX,Iz. */
FNIEMOP_DEF(iemOp_adc_eAX_Iz)
{
    IEMOP_MNEMONIC("adc rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_adc);
}
7141
7142
/** Opcode 0x16 - PUSH SS. */
FNIEMOP_DEF(iemOp_push_SS)
{
    IEMOP_MNEMONIC("push ss");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_SS);
}
7149
7150
/** Opcode 0x17 - POP SS.
 *  Not available in 64-bit mode; no lock prefix.  Deferred to the C
 *  implementation (segment load can fault). */
FNIEMOP_DEF(iemOp_pop_SS)
{
    IEMOP_MNEMONIC("pop ss"); /** @todo implies instruction fusing? */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_SS, pIemCpu->enmEffOpSize);
}
7159
7160
/** Opcode 0x18 - SBB Eb,Gb (subtract with borrow, byte r/m destination). */
FNIEMOP_DEF(iemOp_sbb_Eb_Gb)
{
    IEMOP_MNEMONIC("sbb Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sbb);
}
7167
7168
/** Opcode 0x19 - SBB Ev,Gv. */
FNIEMOP_DEF(iemOp_sbb_Ev_Gv)
{
    IEMOP_MNEMONIC("sbb Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sbb);
}
7175
7176
/** Opcode 0x1a - SBB Gb,Eb. */
FNIEMOP_DEF(iemOp_sbb_Gb_Eb)
{
    IEMOP_MNEMONIC("sbb Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sbb);
}
7183
7184
/** Opcode 0x1b - SBB Gv,Ev. */
FNIEMOP_DEF(iemOp_sbb_Gv_Ev)
{
    IEMOP_MNEMONIC("sbb Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sbb);
}
7191
7192
/** Opcode 0x1c - SBB AL,Ib. */
FNIEMOP_DEF(iemOp_sbb_Al_Ib)
{
    IEMOP_MNEMONIC("sbb al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sbb);
}
7199
7200
/** Opcode 0x1d - SBB rAX,Iz. */
FNIEMOP_DEF(iemOp_sbb_eAX_Iz)
{
    IEMOP_MNEMONIC("sbb rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sbb);
}
7207
7208
/** Opcode 0x1e - PUSH DS. */
FNIEMOP_DEF(iemOp_push_DS)
{
    IEMOP_MNEMONIC("push ds");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_DS);
}
7215
7216
/** Opcode 0x1f - POP DS.
 *  Not available in 64-bit mode; no lock prefix.  Deferred to the C
 *  implementation (segment load can fault). */
FNIEMOP_DEF(iemOp_pop_DS)
{
    IEMOP_MNEMONIC("pop ds");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_DS, pIemCpu->enmEffOpSize);
}
7225
7226
/** Opcode 0x20 - AND Eb,Gb.  AF is architecturally undefined after AND. */
FNIEMOP_DEF(iemOp_and_Eb_Gb)
{
    IEMOP_MNEMONIC("and Eb,Gb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_and);
}
7234
7235
/** Opcode 0x21 - AND Ev,Gv.  AF is architecturally undefined after AND. */
FNIEMOP_DEF(iemOp_and_Ev_Gv)
{
    IEMOP_MNEMONIC("and Ev,Gv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_and);
}
7243
7244
/** Opcode 0x22 - AND Gb,Eb.  AF is architecturally undefined after AND. */
FNIEMOP_DEF(iemOp_and_Gb_Eb)
{
    IEMOP_MNEMONIC("and Gb,Eb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_and);
}
7252
7253
/** Opcode 0x23 - AND Gv,Ev.  AF is architecturally undefined after AND. */
FNIEMOP_DEF(iemOp_and_Gv_Ev)
{
    IEMOP_MNEMONIC("and Gv,Ev");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_and);
}
7261
7262
/** Opcode 0x24 - AND AL,Ib.  AF is architecturally undefined after AND. */
FNIEMOP_DEF(iemOp_and_Al_Ib)
{
    IEMOP_MNEMONIC("and al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_and);
}
7270
7271
/** Opcode 0x25 - AND rAX,Iz.  AF is architecturally undefined after AND. */
FNIEMOP_DEF(iemOp_and_eAX_Iz)
{
    IEMOP_MNEMONIC("and rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_and);
}
7279
7280
/** Opcode 0x26 - ES segment override prefix.  Records the prefix state and
 *  continues decoding with the next opcode byte. */
FNIEMOP_DEF(iemOp_seg_ES)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg es");
    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_ES;
    pIemCpu->iEffSeg = X86_SREG_ES;

    /* Fetch and dispatch the actual opcode following the prefix. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
7291
7292
/** Opcode 0x27 - DAA (decimal adjust AL after addition).
 *  Not available in 64-bit mode; OF is architecturally undefined. */
FNIEMOP_DEF(iemOp_daa)
{
    IEMOP_MNEMONIC("daa AL");
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_daa);
}
7302
7303
/** Opcode 0x28 - SUB Eb,Gb. */
FNIEMOP_DEF(iemOp_sub_Eb_Gb)
{
    IEMOP_MNEMONIC("sub Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sub);
}
7310
7311
/** Opcode 0x29 - SUB Ev,Gv. */
FNIEMOP_DEF(iemOp_sub_Ev_Gv)
{
    IEMOP_MNEMONIC("sub Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sub);
}
7318
7319
/** Opcode 0x2a - SUB Gb,Eb. */
FNIEMOP_DEF(iemOp_sub_Gb_Eb)
{
    IEMOP_MNEMONIC("sub Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sub);
}
7326
7327
/** Opcode 0x2b - SUB Gv,Ev. */
FNIEMOP_DEF(iemOp_sub_Gv_Ev)
{
    IEMOP_MNEMONIC("sub Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sub);
}
7334
7335
/** Opcode 0x2c - SUB AL,Ib. */
FNIEMOP_DEF(iemOp_sub_Al_Ib)
{
    IEMOP_MNEMONIC("sub al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sub);
}
7342
7343
/** Opcode 0x2d - SUB rAX,Iz. */
FNIEMOP_DEF(iemOp_sub_eAX_Iz)
{
    IEMOP_MNEMONIC("sub rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sub);
}
7350
7351
/** Opcode 0x2e - CS segment override prefix.  Records the prefix state and
 *  continues decoding with the next opcode byte. */
FNIEMOP_DEF(iemOp_seg_CS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg cs");
    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_CS;
    pIemCpu->iEffSeg = X86_SREG_CS;

    /* Fetch and dispatch the actual opcode following the prefix. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
7362
7363
/** Opcode 0x2f - DAS (decimal adjust AL after subtraction).
 *  Not available in 64-bit mode; OF is architecturally undefined. */
FNIEMOP_DEF(iemOp_das)
{
    IEMOP_MNEMONIC("das AL");
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_das);
}
7373
7374
/** Opcode 0x30 - XOR Eb,Gb.  AF is architecturally undefined after XOR. */
FNIEMOP_DEF(iemOp_xor_Eb_Gb)
{
    IEMOP_MNEMONIC("xor Eb,Gb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_xor);
}
7382
7383
/** Opcode 0x31 - XOR Ev,Gv.  AF is architecturally undefined after XOR. */
FNIEMOP_DEF(iemOp_xor_Ev_Gv)
{
    IEMOP_MNEMONIC("xor Ev,Gv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_xor);
}
7391
7392
/** Opcode 0x32 - XOR Gb,Eb.  AF is architecturally undefined after XOR. */
FNIEMOP_DEF(iemOp_xor_Gb_Eb)
{
    IEMOP_MNEMONIC("xor Gb,Eb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_xor);
}
7400
7401
/** Opcode 0x33 - XOR Gv,Ev.  AF is architecturally undefined after XOR. */
FNIEMOP_DEF(iemOp_xor_Gv_Ev)
{
    IEMOP_MNEMONIC("xor Gv,Ev");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_xor);
}
7409
7410
/** Opcode 0x34 - XOR AL,Ib.  AF is architecturally undefined after XOR. */
FNIEMOP_DEF(iemOp_xor_Al_Ib)
{
    IEMOP_MNEMONIC("xor al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_xor);
}
7418
7419
/** Opcode 0x35 - XOR rAX,Iz.  AF is architecturally undefined after XOR. */
FNIEMOP_DEF(iemOp_xor_eAX_Iz)
{
    IEMOP_MNEMONIC("xor rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_xor);
}
7427
7428
/** Opcode 0x36 - SS segment override prefix.  Records the prefix state and
 *  continues decoding with the next opcode byte. */
FNIEMOP_DEF(iemOp_seg_SS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ss");
    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_SS;
    pIemCpu->iEffSeg = X86_SREG_SS;

    /* Fetch and dispatch the actual opcode following the prefix. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
7439
7440
/** Opcode 0x37 - AAA (ASCII adjust after addition); decoder stub, not yet implemented. */
FNIEMOP_STUB(iemOp_aaa);
7443
7444
/** Opcode 0x38 - CMP Eb,Gb.  Lock prefix is invalid for CMP. */
FNIEMOP_DEF(iemOp_cmp_Eb_Gb)
{
    IEMOP_MNEMONIC("cmp Eb,Gb");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo do we have to decode the whole instruction first? */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_cmp);
}
7452
7453
/** Opcode 0x39 - CMP Ev,Gv.  Lock prefix is invalid for CMP. */
FNIEMOP_DEF(iemOp_cmp_Ev_Gv)
{
    IEMOP_MNEMONIC("cmp Ev,Gv");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo do we have to decode the whole instruction first? */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_cmp);
}
7461
7462
/** Opcode 0x3a - CMP Gb,Eb. */
FNIEMOP_DEF(iemOp_cmp_Gb_Eb)
{
    IEMOP_MNEMONIC("cmp Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_cmp);
}
7469
7470
/** Opcode 0x3b - CMP Gv,Ev. */
FNIEMOP_DEF(iemOp_cmp_Gv_Ev)
{
    IEMOP_MNEMONIC("cmp Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_cmp);
}
7477
7478
/** Opcode 0x3c - CMP AL,Ib. */
FNIEMOP_DEF(iemOp_cmp_Al_Ib)
{
    IEMOP_MNEMONIC("cmp al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_cmp);
}
7485
7486
/** Opcode 0x3d - CMP rAX,Iz. */
FNIEMOP_DEF(iemOp_cmp_eAX_Iz)
{
    IEMOP_MNEMONIC("cmp rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_cmp);
}
7493
7494
/** Opcode 0x3e - DS segment override prefix.  Records the prefix state and
 *  continues decoding with the next opcode byte. */
FNIEMOP_DEF(iemOp_seg_DS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ds");
    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_DS;
    pIemCpu->iEffSeg = X86_SREG_DS;

    /* Fetch and dispatch the actual opcode following the prefix. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
7505
7506
/** Opcode 0x3f - AAS (ASCII adjust after subtraction); decoder stub, not yet implemented. */
FNIEMOP_STUB(iemOp_aas);
7509
7510/**
7511 * Common 'inc/dec/not/neg register' helper.
7512 */
7513FNIEMOP_DEF_2(iemOpCommonUnaryGReg, PCIEMOPUNARYSIZES, pImpl, uint8_t, iReg)
7514{
7515 IEMOP_HLP_NO_LOCK_PREFIX();
7516 switch (pIemCpu->enmEffOpSize)
7517 {
7518 case IEMMODE_16BIT:
7519 IEM_MC_BEGIN(2, 0);
7520 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7521 IEM_MC_ARG(uint32_t *, pEFlags, 1);
7522 IEM_MC_REF_GREG_U16(pu16Dst, iReg);
7523 IEM_MC_REF_EFLAGS(pEFlags);
7524 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
7525 IEM_MC_ADVANCE_RIP();
7526 IEM_MC_END();
7527 return VINF_SUCCESS;
7528
7529 case IEMMODE_32BIT:
7530 IEM_MC_BEGIN(2, 0);
7531 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7532 IEM_MC_ARG(uint32_t *, pEFlags, 1);
7533 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
7534 IEM_MC_REF_EFLAGS(pEFlags);
7535 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
7536 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7537 IEM_MC_ADVANCE_RIP();
7538 IEM_MC_END();
7539 return VINF_SUCCESS;
7540
7541 case IEMMODE_64BIT:
7542 IEM_MC_BEGIN(2, 0);
7543 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7544 IEM_MC_ARG(uint32_t *, pEFlags, 1);
7545 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
7546 IEM_MC_REF_EFLAGS(pEFlags);
7547 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
7548 IEM_MC_ADVANCE_RIP();
7549 IEM_MC_END();
7550 return VINF_SUCCESS;
7551 }
7552 return VINF_SUCCESS;
7553}
7554
7555
/** Opcode 0x40 - INC eAX; in 64-bit mode this byte is the plain REX prefix instead. */
FNIEMOP_DEF(iemOp_inc_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eAX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xAX);
}
7574
7575
/** Opcode 0x41 - INC eCX; in 64-bit mode this byte is the REX.B prefix instead. */
FNIEMOP_DEF(iemOp_inc_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.b");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B;
        pIemCpu->uRexB = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eCX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xCX);
}
7595
7596
/** Opcode 0x42 - INC eDX; in 64-bit mode this byte is the REX.X prefix instead. */
FNIEMOP_DEF(iemOp_inc_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.x");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X;
        pIemCpu->uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eDX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDX);
}
7616
7617
7618
/** Opcode 0x43 - INC eBX; in 64-bit mode this byte is the REX.BX prefix instead. */
FNIEMOP_DEF(iemOp_inc_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bx");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pIemCpu->uRexB     = 1 << 3;
        pIemCpu->uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eBX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBX);
}
7639
7640
/** Opcode 0x44 - INC eSP; in 64-bit mode this byte is the REX.R prefix instead. */
FNIEMOP_DEF(iemOp_inc_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.r");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R;
        pIemCpu->uRexReg = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eSP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSP);
}
7660
7661
/** Opcode 0x45 - INC eBP; in 64-bit mode this byte is the REX.RB prefix instead. */
FNIEMOP_DEF(iemOp_inc_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rb");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B;
        pIemCpu->uRexReg = 1 << 3;
        pIemCpu->uRexB   = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eBP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBP);
}
7682
7683
/** Opcode 0x46 - INC eSI; in 64-bit mode this byte is the REX.RX prefix instead. */
FNIEMOP_DEF(iemOp_inc_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rx");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X;
        pIemCpu->uRexReg   = 1 << 3;
        pIemCpu->uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eSI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSI);
}
7704
7705
/** Opcode 0x47 - INC eDI; in 64-bit mode this byte is the REX.RBX prefix instead. */
FNIEMOP_DEF(iemOp_inc_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbx");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pIemCpu->uRexReg   = 1 << 3;
        pIemCpu->uRexB     = 1 << 3;
        pIemCpu->uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eDI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDI);
}
7727
7728
/** Opcode 0x48 - DEC eAX; in 64-bit mode this byte is the REX.W prefix instead
 *  (which changes the effective operand size, hence the recalculation). */
FNIEMOP_DEF(iemOp_dec_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.w");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_SIZE_REX_W;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eAX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xAX);
}
7748
7749
/** Opcode 0x49 - DEC eCX; in 64-bit mode this byte is the REX.BW prefix instead. */
FNIEMOP_DEF(iemOp_dec_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bw");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexB = 1 << 3;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eCX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xCX);
}
7770
7771
/** Opcode 0x4a - DEC eDX; in 64-bit mode this byte is the REX.XW prefix instead. */
FNIEMOP_DEF(iemOp_dec_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.xw");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexIndex = 1 << 3;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eDX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDX);
}
7792
7793
/** Opcode 0x4b - DEC eBX; in 64-bit mode this byte is the REX.BXW prefix instead. */
FNIEMOP_DEF(iemOp_dec_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bxw");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexB     = 1 << 3;
        pIemCpu->uRexIndex = 1 << 3;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eBX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBX);
}
7815
7816
/** Opcode 0x4c - DEC eSP; in 64-bit mode this byte is the REX.RW prefix instead. */
FNIEMOP_DEF(iemOp_dec_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rw");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexReg = 1 << 3;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eSP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSP);
}
7837
7838
/** Opcode 0x4d - DEC eBP; in 64-bit mode this byte is the REX.RBW prefix instead. */
FNIEMOP_DEF(iemOp_dec_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbw");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexReg = 1 << 3;
        pIemCpu->uRexB   = 1 << 3;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eBP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBP);
}
7860
7861
/** Opcode 0x4e - DEC eSI; in 64-bit mode this byte is the REX.RXW prefix instead. */
FNIEMOP_DEF(iemOp_dec_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rxw");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexReg   = 1 << 3;
        pIemCpu->uRexIndex = 1 << 3;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eSI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSI);
}
7883
7884
/** Opcode 0x4f - DEC eDI; in 64-bit mode this byte is the REX.RBXW prefix instead. */
FNIEMOP_DEF(iemOp_dec_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbxw");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexReg   = 1 << 3;
        pIemCpu->uRexB     = 1 << 3;
        pIemCpu->uRexIndex = 1 << 3;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eDI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDI);
}
7907
7908
7909/**
7910 * Common 'push register' helper.
7911 */
7912FNIEMOP_DEF_1(iemOpCommonPushGReg, uint8_t, iReg)
7913{
7914 IEMOP_HLP_NO_LOCK_PREFIX();
7915 if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
7916 {
7917 iReg |= pIemCpu->uRexB;
7918 pIemCpu->enmDefOpSize = IEMMODE_64BIT;
7919 pIemCpu->enmEffOpSize = !(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
7920 }
7921
7922 switch (pIemCpu->enmEffOpSize)
7923 {
7924 case IEMMODE_16BIT:
7925 IEM_MC_BEGIN(0, 1);
7926 IEM_MC_LOCAL(uint16_t, u16Value);
7927 IEM_MC_FETCH_GREG_U16(u16Value, iReg);
7928 IEM_MC_PUSH_U16(u16Value);
7929 IEM_MC_ADVANCE_RIP();
7930 IEM_MC_END();
7931 break;
7932
7933 case IEMMODE_32BIT:
7934 IEM_MC_BEGIN(0, 1);
7935 IEM_MC_LOCAL(uint32_t, u32Value);
7936 IEM_MC_FETCH_GREG_U32(u32Value, iReg);
7937 IEM_MC_PUSH_U32(u32Value);
7938 IEM_MC_ADVANCE_RIP();
7939 IEM_MC_END();
7940 break;
7941
7942 case IEMMODE_64BIT:
7943 IEM_MC_BEGIN(0, 1);
7944 IEM_MC_LOCAL(uint64_t, u64Value);
7945 IEM_MC_FETCH_GREG_U64(u64Value, iReg);
7946 IEM_MC_PUSH_U64(u64Value);
7947 IEM_MC_ADVANCE_RIP();
7948 IEM_MC_END();
7949 break;
7950 }
7951
7952 return VINF_SUCCESS;
7953}
7954
7955
/** Opcode 0x50 - PUSH rAX (r8 with REX.B). */
FNIEMOP_DEF(iemOp_push_eAX)
{
    IEMOP_MNEMONIC("push rAX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xAX);
}
7962
7963
/** Opcode 0x51 - PUSH rCX (r9 with REX.B). */
FNIEMOP_DEF(iemOp_push_eCX)
{
    IEMOP_MNEMONIC("push rCX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xCX);
}
7970
7971
/** Opcode 0x52 - PUSH rDX (r10 with REX.B). */
FNIEMOP_DEF(iemOp_push_eDX)
{
    IEMOP_MNEMONIC("push rDX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDX);
}
7978
7979
/** Opcode 0x53 - PUSH rBX (r11 with REX.B). */
FNIEMOP_DEF(iemOp_push_eBX)
{
    IEMOP_MNEMONIC("push rBX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBX);
}
7986
7987
/** Opcode 0x54 - PUSH rSP (r12 with REX.B). */
FNIEMOP_DEF(iemOp_push_eSP)
{
    IEMOP_MNEMONIC("push rSP");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSP);
}
7994
7995
/** Opcode 0x55 - PUSH rBP (r13 with REX.B). */
FNIEMOP_DEF(iemOp_push_eBP)
{
    IEMOP_MNEMONIC("push rBP");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBP);
}
8002
8003
/** Opcode 0x56 - PUSH rSI (r14 with REX.B). */
FNIEMOP_DEF(iemOp_push_eSI)
{
    IEMOP_MNEMONIC("push rSI");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSI);
}
8010
8011
/** Opcode 0x57 - PUSH rDI (r15 with REX.B). */
FNIEMOP_DEF(iemOp_push_eDI)
{
    IEMOP_MNEMONIC("push rDI");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDI);
}
8018
8019
8020/**
8021 * Common 'pop register' helper.
8022 */
8023FNIEMOP_DEF_1(iemOpCommonPopGReg, uint8_t, iReg)
8024{
8025 IEMOP_HLP_NO_LOCK_PREFIX();
8026 if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
8027 {
8028 iReg |= pIemCpu->uRexB;
8029 pIemCpu->enmDefOpSize = IEMMODE_64BIT;
8030 pIemCpu->enmEffOpSize = !(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
8031 }
8032
8033 switch (pIemCpu->enmEffOpSize)
8034 {
8035 case IEMMODE_16BIT:
8036 IEM_MC_BEGIN(0, 1);
8037 IEM_MC_LOCAL(uint16_t, *pu16Dst);
8038 IEM_MC_REF_GREG_U16(pu16Dst, iReg);
8039 IEM_MC_POP_U16(pu16Dst);
8040 IEM_MC_ADVANCE_RIP();
8041 IEM_MC_END();
8042 break;
8043
8044 case IEMMODE_32BIT:
8045 IEM_MC_BEGIN(0, 1);
8046 IEM_MC_LOCAL(uint32_t, *pu32Dst);
8047 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
8048 IEM_MC_POP_U32(pu32Dst);
8049 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /** @todo testcase*/
8050 IEM_MC_ADVANCE_RIP();
8051 IEM_MC_END();
8052 break;
8053
8054 case IEMMODE_64BIT:
8055 IEM_MC_BEGIN(0, 1);
8056 IEM_MC_LOCAL(uint64_t, *pu64Dst);
8057 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
8058 IEM_MC_POP_U64(pu64Dst);
8059 IEM_MC_ADVANCE_RIP();
8060 IEM_MC_END();
8061 break;
8062 }
8063
8064 return VINF_SUCCESS;
8065}
8066
8067
/** Opcode 0x58 - pop rAX/eAX/AX. */
FNIEMOP_DEF(iemOp_pop_eAX)
{
    IEMOP_MNEMONIC("pop rAX");
    /* REX.B extension and operand-size handling are in the common worker. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xAX);
}
8074
8075
/** Opcode 0x59 - pop rCX/eCX/CX. */
FNIEMOP_DEF(iemOp_pop_eCX)
{
    IEMOP_MNEMONIC("pop rCX");
    /* REX.B extension and operand-size handling are in the common worker. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xCX);
}
8082
8083
/** Opcode 0x5a - pop rDX/eDX/DX. */
FNIEMOP_DEF(iemOp_pop_eDX)
{
    IEMOP_MNEMONIC("pop rDX");
    /* REX.B extension and operand-size handling are in the common worker. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDX);
}
8090
8091
/** Opcode 0x5b - pop rBX/eBX/BX. */
FNIEMOP_DEF(iemOp_pop_eBX)
{
    IEMOP_MNEMONIC("pop rBX");
    /* REX.B extension and operand-size handling are in the common worker. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBX);
}
8098
8099
/** Opcode 0x5c - pop rSP/eSP/SP.
 *
 * Special-cased: the destination is the stack pointer itself, so the value
 * is popped into a local and stored into xSP afterwards instead of going
 * through the by-reference common worker. */
FNIEMOP_DEF(iemOp_pop_eSP)
{
    IEMOP_MNEMONIC("pop rSP");
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        /* With REX.B this encodes POP r12, which needs no special casing. */
        if (pIemCpu->uRexB)
            return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSP);
        /* Default 64-bit operand size; 0x66 selects 16-bit (32-bit is not
           encodable in long mode). */
        pIemCpu->enmDefOpSize = IEMMODE_64BIT;
        pIemCpu->enmEffOpSize = !(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
                           DISOPTYPE_HARMLESS | DISOPTYPE_DEFAULT_64_OP_SIZE | DISOPTYPE_REXB_EXTENDS_OPREG);
    /** @todo add testcase for this instruction. */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Dst);
            IEM_MC_POP_U16(&u16Dst); /** @todo not correct MC, fix later. */
            IEM_MC_STORE_GREG_U16(X86_GREG_xSP, u16Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Dst);
            IEM_MC_POP_U32(&u32Dst);
            IEM_MC_STORE_GREG_U32(X86_GREG_xSP, u32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Dst);
            IEM_MC_POP_U64(&u64Dst);
            IEM_MC_STORE_GREG_U64(X86_GREG_xSP, u64Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
8147
8148
/** Opcode 0x5d - pop rBP/eBP/BP. */
FNIEMOP_DEF(iemOp_pop_eBP)
{
    IEMOP_MNEMONIC("pop rBP");
    /* REX.B extension and operand-size handling are in the common worker. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBP);
}
8155
8156
/** Opcode 0x5e - pop rSI/eSI/SI. */
FNIEMOP_DEF(iemOp_pop_eSI)
{
    IEMOP_MNEMONIC("pop rSI");
    /* REX.B extension and operand-size handling are in the common worker. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSI);
}
8163
8164
/** Opcode 0x5f - pop rDI/eDI/DI. */
FNIEMOP_DEF(iemOp_pop_eDI)
{
    IEMOP_MNEMONIC("pop rDI");
    /* REX.B extension and operand-size handling are in the common worker. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDI);
}
8171
8172
8173/** Opcode 0x60. */
8174FNIEMOP_DEF(iemOp_pusha)
8175{
8176 IEMOP_MNEMONIC("pusha");
8177 IEMOP_HLP_MIN_186();
8178 IEMOP_HLP_NO_64BIT();
8179 if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
8180 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_16);
8181 Assert(pIemCpu->enmEffOpSize == IEMMODE_32BIT);
8182 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_32);
8183}
8184
8185
8186/** Opcode 0x61. */
8187FNIEMOP_DEF(iemOp_popa)
8188{
8189 IEMOP_MNEMONIC("popa");
8190 IEMOP_HLP_MIN_186();
8191 IEMOP_HLP_NO_64BIT();
8192 if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
8193 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_16);
8194 Assert(pIemCpu->enmEffOpSize == IEMMODE_32BIT);
8195 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_32);
8196}
8197
8198
/** Opcode 0x62 - BOUND Gv,Ma (the byte presumably also covers the EVEX
 *  prefix in later ISAs, going by the function name).  Not implemented yet. */
FNIEMOP_STUB(iemOp_bound_Gv_Ma_evex);
// IEMOP_HLP_MIN_186(); /* to be enabled when implemented - BOUND is a 186+ instruction */
8202
8203
/** Opcode 0x63 - non-64-bit modes.
 *
 * ARPL Ew,Gw: adjusts the RPL field of the destination selector; protected
 * mode only (decode helper rejects real and V86 mode). */
FNIEMOP_DEF(iemOp_arpl_Ew_Gw)
{
    IEMOP_MNEMONIC("arpl Ew,Gw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register */
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint16_t *, pu16Dst, 0);
        IEM_MC_ARG(uint16_t, u16Src, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* Memory: read-modify-write mapping with local EFLAGS committed
           after the assembly worker has run. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *, pu16Dst, 0);
        IEM_MC_ARG(uint16_t, u16Src, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;

}
8253
8254
/** Opcode 0x63.
 * @note This is a weird one. It works like a regular move instruction if
 *       REX.W isn't set, at least according to AMD docs (rev 3.15, 2009-11).
 * @todo This definitely needs a testcase to verify the odd cases. */
FNIEMOP_DEF(iemOp_movsxd_Gv_Ev)
{
    Assert(pIemCpu->enmEffOpSize == IEMMODE_64BIT); /* Caller branched already. */

    IEMOP_MNEMONIC("movsxd Gv,Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register to register: sign-extend the 32-bit source into the
         * full 64-bit destination.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, u64Value);
        IEM_MC_FETCH_GREG_U32_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading a register from memory (32-bit fetch, sign-extended).
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, u64Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U32_SX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
8296
8297
8298/** Opcode 0x64. */
8299FNIEMOP_DEF(iemOp_seg_FS)
8300{
8301 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg fs");
8302 IEMOP_HLP_MIN_386();
8303
8304 pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_FS;
8305 pIemCpu->iEffSeg = X86_SREG_FS;
8306
8307 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8308 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8309}
8310
8311
8312/** Opcode 0x65. */
8313FNIEMOP_DEF(iemOp_seg_GS)
8314{
8315 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg gs");
8316 IEMOP_HLP_MIN_386();
8317
8318 pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_GS;
8319 pIemCpu->iEffSeg = X86_SREG_GS;
8320
8321 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8322 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8323}
8324
8325
8326/** Opcode 0x66. */
8327FNIEMOP_DEF(iemOp_op_size)
8328{
8329 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("op size");
8330 IEMOP_HLP_MIN_386();
8331
8332 pIemCpu->fPrefixes |= IEM_OP_PRF_SIZE_OP;
8333 iemRecalEffOpSize(pIemCpu);
8334
8335 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8336 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8337}
8338
8339
/** Opcode 0x67 - address-size override prefix (386+). */
FNIEMOP_DEF(iemOp_addr_size)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("addr size");
    IEMOP_HLP_MIN_386();

    pIemCpu->fPrefixes |= IEM_OP_PRF_SIZE_ADDR;
    /* Toggle relative to the default addressing mode; in 64-bit mode the
       prefix selects 32-bit addressing (16-bit is not available there). */
    switch (pIemCpu->enmDefAddrMode)
    {
        case IEMMODE_16BIT: pIemCpu->enmEffAddrMode = IEMMODE_32BIT; break;
        case IEMMODE_32BIT: pIemCpu->enmEffAddrMode = IEMMODE_16BIT; break;
        case IEMMODE_64BIT: pIemCpu->enmEffAddrMode = IEMMODE_32BIT; break;
        default: AssertFailed();
    }

    /* A prefix never terminates decoding: fetch and dispatch the next byte. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
8358
8359
/** Opcode 0x68 - push Iz (immediate of operand size; 186+).
 *  In 64-bit mode a 32-bit immediate is fetched and sign-extended. */
FNIEMOP_DEF(iemOp_push_Iz)
{
    IEMOP_MNEMONIC("push Iz");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U16(u16Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U32(u32Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            /* Sign-extend the 32-bit immediate to 64 bits. */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U64(u64Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* macro supplies the default case */
    }
}
8404
8405
/** Opcode 0x69 - three-operand IMUL with full-size immediate (186+).
 *
 * The product is computed into a local temp (referenced as the destination
 * argument) and then stored into the Gv register; SF/ZF/AF/PF are undefined
 * after IMUL, hence the verification-mode declaration below. */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Iz)
{
    IEMOP_MNEMONIC("imul Gv,Ev,Iz"); /* Gv = Ev * Iz; */
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);

                IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand; the last IEM_MC_CALC_RM_EFF_ADDR argument
                   appears to be the immediate size trailing the ModRM bytes
                   (2 here) - confirm against the macro definition. */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);

                IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand (4 immediate bytes follow) */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand; the immediate is 32 bits, sign-extended */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);

                IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand (4 immediate bytes follow, sign-extended) */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }
    }
    AssertFailedReturn(VERR_IEM_IPE_9); /* not reached - all enum values handled above */
}
8565
8566
/** Opcode 0x6a - push Ib (sign-extended byte immediate; 186+).
 *  The int8_t value is sign-extended to the operand size by the implicit
 *  conversion when passed to the PUSH micro-ops. */
FNIEMOP_DEF(iemOp_push_Ib)
{
    IEMOP_MNEMONIC("push Ib");
    IEMOP_HLP_MIN_186();
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0,0);
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_PUSH_U16(i8Imm);
            break;
        case IEMMODE_32BIT:
            IEM_MC_PUSH_U32(i8Imm);
            break;
        case IEMMODE_64BIT:
            IEM_MC_PUSH_U64(i8Imm);
            break;
    }
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8593
8594
/** Opcode 0x6b - three-operand IMUL with sign-extended byte immediate (186+).
 *
 * Same structure as opcode 0x69, but the immediate is a single byte that is
 * sign-extended to the operand size.  SF/ZF/AF/PF are undefined after IMUL. */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Ib)
{
    IEMOP_MNEMONIC("imul Gv,Ev,Ib"); /* Gv = Ev * Ib; */
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand; (int8_t) cast sign-extends the immediate */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);

                IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand (1 immediate byte follows the ModRM bytes) */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_S8_SX_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);

                IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_S8_SX_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);

                IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S8_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
    }
    AssertFailedReturn(VERR_IEM_IPE_8); /* not reached - all enum values handled above */
}
8748
8749
/** Opcode 0x6c - INS/INSB (186+); byte string input from port DX.
 *  Both rep prefixes select the repeated variant; REPNZ has no distinct
 *  meaning here. */
FNIEMOP_DEF(iemOp_insb_Yb_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep ins Yb,DX");
        /* Dispatch on address size; the 'false' argument is passed through
           to the C implementation (meaning defined there). */
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr16, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr32, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC("ins Yb,DX");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr16, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr32, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8778
8779
/** Opcode 0x6d - INS/INSW/INSD (186+); word/dword string input from port DX.
 *  A 64-bit operand size is treated as 32-bit (op32 workers).  All paths
 *  return via the inner switches; the trailing breaks are unreachable. */
FNIEMOP_DEF(iemOp_inswd_Yv_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC("rep ins Yv,DX");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* no 64-bit I/O port operand; use 32-bit */
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* macro supplies the default case */
        }
    }
    else
    {
        IEMOP_MNEMONIC("ins Yv,DX");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT:
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8840
8841
/** Opcode 0x6e - OUTS/OUTSB (186+); byte string output to port DX.
 *  The effective segment is forwarded since OUTS reads from memory and
 *  honours segment overrides. */
FNIEMOP_DEF(iemOp_outsb_Yb_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep outs DX,Yb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr16, pIemCpu->iEffSeg, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr32, pIemCpu->iEffSeg, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr64, pIemCpu->iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC("outs DX,Yb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr16, pIemCpu->iEffSeg, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr32, pIemCpu->iEffSeg, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr64, pIemCpu->iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8870
8871
/** Opcode 0x6f - OUTS/OUTSW/OUTSD (186+); word/dword string output to DX.
 *  A 64-bit operand size is treated as 32-bit (op32 workers).  All paths
 *  return via the inner switches; the trailing breaks are unreachable. */
FNIEMOP_DEF(iemOp_outswd_Yv_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC("rep outs DX,Yv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr16, pIemCpu->iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr32, pIemCpu->iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr64, pIemCpu->iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* no 64-bit I/O port operand; use 32-bit */
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr16, pIemCpu->iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr32, pIemCpu->iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr64, pIemCpu->iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* macro supplies the default case */
        }
    }
    else
    {
        IEMOP_MNEMONIC("outs DX,Yv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr16, pIemCpu->iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr32, pIemCpu->iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr64, pIemCpu->iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT:
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr16, pIemCpu->iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr32, pIemCpu->iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr64, pIemCpu->iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8932
8933
/** Opcode 0x70 - JO Jb: short jump if the overflow flag is set. */
FNIEMOP_DEF(iemOp_jo_Jb)
{
    IEMOP_MNEMONIC("jo Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8951
8952
/** Opcode 0x71 - JNO Jb: short jump if the overflow flag is clear.
 *  Note the inverted arms: OF set falls through, OF clear jumps. */
FNIEMOP_DEF(iemOp_jno_Jb)
{
    IEMOP_MNEMONIC("jno Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8970
/** Opcode 0x72 - JC/JB/JNAE Jb: short jump if the carry flag is set. */
FNIEMOP_DEF(iemOp_jc_Jb)
{
    IEMOP_MNEMONIC("jc/jnae Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8988
8989
/** Opcode 0x73 - JNC/JNB/JAE Jb: short jump if the carry flag is clear.
 *  Note the inverted arms: CF set falls through, CF clear jumps. */
FNIEMOP_DEF(iemOp_jnc_Jb)
{
    IEMOP_MNEMONIC("jnc/jnb Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9007
9008
/** Opcode 0x74 - JE/JZ Jb: short jump if the zero flag is set. */
FNIEMOP_DEF(iemOp_je_Jb)
{
    IEMOP_MNEMONIC("je/jz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9026
9027
/** Opcode 0x75 - JNE/JNZ Jb: short jump if the zero flag is clear.
 *  Note the inverted arms: ZF set falls through, ZF clear jumps. */
FNIEMOP_DEF(iemOp_jne_Jb)
{
    IEMOP_MNEMONIC("jne/jnz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9045
9046
/** Opcode 0x76 - JBE/JNA Jb: short jump if carry or zero is set (below or equal). */
FNIEMOP_DEF(iemOp_jbe_Jb)
{
    IEMOP_MNEMONIC("jbe/jna Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9064
9065
/**
 * @opcode 0x77
 * JNBE/JA Jb - short jump if not below-or-equal (above), unsigned.
 * Taken when both EFLAGS.CF and EFLAGS.ZF are clear; note the inverted arms.
 */
FNIEMOP_DEF(iemOp_jnbe_Jb)
{
    IEMOP_MNEMONIC("jnbe/ja Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* near jumps default to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP();           /* CF or ZF set: fall through */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);       /* both clear: branch */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9083
9084
/**
 * @opcode 0x78
 * JS Jb - short jump if sign.
 * Taken when EFLAGS.SF is set.
 */
FNIEMOP_DEF(iemOp_js_Jb)
{
    IEMOP_MNEMONIC("js Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* near jumps default to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_REL_JMP_S8(i8Imm);       /* SF set: branch */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();           /* SF clear: fall through */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9102
9103
/**
 * @opcode 0x79
 * JNS Jb - short jump if not sign.
 * Taken when EFLAGS.SF is clear; note the inverted then/else arms.
 */
FNIEMOP_DEF(iemOp_jns_Jb)
{
    IEMOP_MNEMONIC("jns Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* near jumps default to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_ADVANCE_RIP();           /* SF set: fall through */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);       /* SF clear: branch */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9121
9122
/**
 * @opcode 0x7a
 * JP/JPE Jb - short jump if parity (parity even).
 * Taken when EFLAGS.PF is set.
 */
FNIEMOP_DEF(iemOp_jp_Jb)
{
    IEMOP_MNEMONIC("jp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* near jumps default to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_REL_JMP_S8(i8Imm);       /* PF set: branch */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();           /* PF clear: fall through */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9140
9141
/**
 * @opcode 0x7b
 * JNP/JPO Jb - short jump if not parity (parity odd).
 * Taken when EFLAGS.PF is clear; note the inverted then/else arms.
 */
FNIEMOP_DEF(iemOp_jnp_Jb)
{
    IEMOP_MNEMONIC("jnp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* near jumps default to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_ADVANCE_RIP();           /* PF set: fall through */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);       /* PF clear: branch */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9159
9160
/**
 * @opcode 0x7c
 * JL/JNGE Jb - short jump if less (not greater-or-equal), signed.
 * Taken when EFLAGS.SF != EFLAGS.OF.
 */
FNIEMOP_DEF(iemOp_jl_Jb)
{
    IEMOP_MNEMONIC("jl/jnge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* near jumps default to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);       /* SF != OF: branch */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();           /* SF == OF: fall through */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9178
9179
/**
 * @opcode 0x7d
 * JNL/JGE Jb - short jump if not less (greater-or-equal), signed.
 * Taken when EFLAGS.SF == EFLAGS.OF; note the inverted then/else arms.
 */
FNIEMOP_DEF(iemOp_jnl_Jb)
{
    IEMOP_MNEMONIC("jnl/jge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* near jumps default to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();           /* SF != OF: fall through */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);       /* SF == OF: branch */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9197
9198
/**
 * @opcode 0x7e
 * JLE/JNG Jb - short jump if less-or-equal (not greater), signed.
 * Taken when EFLAGS.ZF is set or EFLAGS.SF != EFLAGS.OF.
 */
FNIEMOP_DEF(iemOp_jle_Jb)
{
    IEMOP_MNEMONIC("jle/jng Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* near jumps default to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);       /* ZF set or SF != OF: branch */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();           /* otherwise: fall through */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9216
9217
/**
 * @opcode 0x7f
 * JNLE/JG Jb - short jump if not less-or-equal (greater), signed.
 * Taken when EFLAGS.ZF is clear and EFLAGS.SF == EFLAGS.OF; inverted arms.
 */
FNIEMOP_DEF(iemOp_jnle_Jb)
{
    IEMOP_MNEMONIC("jnle/jg Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* near jumps default to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();           /* ZF set or SF != OF: fall through */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);       /* otherwise: branch */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9235
9236
/**
 * @opcode 0x80
 * Group 1 byte operations with byte immediate:
 * ADD/OR/ADC/SBB/AND/SUB/XOR/CMP Eb,Ib.
 *
 * The ModR/M reg field selects the operation from g_apIemImplGrp1.  CMP is
 * the only group member without a locked worker (pfnLockedU8 == NULL), which
 * is how the memory path below tells read-write from read-only access.
 */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_80)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Mnemonic lookup: packed string table, 4 bytes (incl. NUL padding) per entry. */
    IEMOP_MNEMONIC2("add\0or\0\0adc\0sbb\0and\0sub\0xor\0cmp" + ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)*4, "Eb,Ib");
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target - LOCK is only valid with a memory destination. */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        uint32_t fAccess;
        if (pImpl->pfnLockedU8)
            fAccess = IEM_ACCESS_DATA_RW;
        else
        { /* CMP - destination is only read, so LOCK is invalid here. */
            IEMOP_HLP_NO_LOCK_PREFIX();
            fAccess = IEM_ACCESS_DATA_R;
        }
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        /* Last argument = number of immediate bytes still to be fetched. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);

        IEM_MC_MEM_MAP(pu8Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
9295
9296
/**
 * @opcode 0x81
 * Group 1 word/dword/qword operations with full-size immediate:
 * ADD/OR/ADC/SBB/AND/SUB/XOR/CMP Ev,Iz.
 *
 * The ModR/M reg field selects the operation from g_apIemImplGrp1.  The
 * immediate is 16 or 32 bits; with a 64-bit effective operand size the
 * 32-bit immediate is sign-extended to 64 bits.  CMP is the only member
 * without a locked worker, which the pfnLockedUxx checks below key off.
 */
FNIEMOP_DEF(iemOp_Grp1_Ev_Iz)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Mnemonic lookup: packed string table, 4 bytes (incl. NUL padding) per entry. */
    IEMOP_MNEMONIC2("add\0or\0\0adc\0sbb\0and\0sub\0xor\0cmp" + ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)*4, "Ev,Iz");
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register target - LOCK is only valid with a memory destination. */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u16Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                uint32_t fAccess;
                if (pImpl->pfnLockedU16)
                    fAccess = IEM_ACCESS_DATA_RW;
                else
                { /* CMP - destination is only read, so LOCK is invalid here. */
                    IEMOP_HLP_NO_LOCK_PREFIX();
                    fAccess = IEM_ACCESS_DATA_R;
                }
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* Last argument = number of immediate bytes still to be fetched (2 here). */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_32BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register target - LOCK is only valid with a memory destination. */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u32Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                /* 32-bit register writes zero the upper half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                uint32_t fAccess;
                if (pImpl->pfnLockedU32)
                    fAccess = IEM_ACCESS_DATA_RW;
                else
                { /* CMP - destination is only read, so LOCK is invalid here. */
                    IEMOP_HLP_NO_LOCK_PREFIX();
                    fAccess = IEM_ACCESS_DATA_R;
                }
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* Last argument = number of immediate bytes still to be fetched (4 here). */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_64BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register target - immediate is a sign-extended 32-bit value. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u64Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                uint32_t fAccess;
                if (pImpl->pfnLockedU64)
                    fAccess = IEM_ACCESS_DATA_RW;
                else
                { /* CMP - destination is only read, so LOCK is invalid here. */
                    IEMOP_HLP_NO_LOCK_PREFIX();
                    fAccess = IEM_ACCESS_DATA_R;
                }
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* Last argument = number of immediate bytes still to be fetched (4;
                   the immediate is 32-bit even with 64-bit operand size). */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            break;
        }
    }
    return VINF_SUCCESS;
}
9471
9472
/**
 * @opcode 0x82
 * Alias of opcode 0x80 (group 1 Eb,Ib); invalid (\#UD) in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_82)
{
    IEMOP_HLP_NO_64BIT(); /** @todo do we need to decode the whole instruction or is this ok? */
    return FNIEMOP_CALL(iemOp_Grp1_Eb_Ib_80);
}
9479
9480
/**
 * @opcode 0x83
 * Group 1 word/dword/qword operations with sign-extended byte immediate:
 * ADD/OR/ADC/SBB/AND/SUB/XOR/CMP Ev,Ib.
 *
 * The byte immediate is sign-extended to the effective operand size via the
 * (int8_t) casts below.  CMP is the only member without a locked worker.
 */
FNIEMOP_DEF(iemOp_Grp1_Ev_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Mnemonic lookup: packed string table, 4 bytes (incl. NUL padding) per entry. */
    IEMOP_MNEMONIC2("add\0or\0\0adc\0sbb\0and\0sub\0xor\0cmp" + ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)*4, "Ev,Ib");
    /* Note! Seems the OR, AND, and XOR instructions are present on CPUs prior
       to the 386 even if absent in the intel reference manuals and some
       3rd party opcode listings. */
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register target - LOCK is only valid with a memory destination.
         */
        IEMOP_HLP_NO_LOCK_PREFIX();
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                /* Sign-extend the byte immediate to 16 bits. */
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (int8_t)u8Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                /* Sign-extend the byte immediate to 32 bits. */
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (int8_t)u8Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                /* 32-bit register writes zero the upper half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                /* Sign-extend the byte immediate to 64 bits. */
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int8_t)u8Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }
        }
    }
    else
    {
        /*
         * Memory target.
         */
        uint32_t fAccess;
        if (pImpl->pfnLockedU16)
            fAccess = IEM_ACCESS_DATA_RW;
        else
        { /* CMP - destination is only read, so LOCK is invalid here. */
            IEMOP_HLP_NO_LOCK_PREFIX();
            fAccess = IEM_ACCESS_DATA_R;
        }

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* Last argument = number of immediate bytes still to be fetched (1). */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm);   /* sign-extended */
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* Last argument = number of immediate bytes still to be fetched (1). */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm);   /* sign-extended */
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* Last argument = number of immediate bytes still to be fetched (1). */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm);   /* sign-extended */
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }
        }
    }
    return VINF_SUCCESS;
}
9644
9645
/**
 * @opcode 0x84
 * TEST Eb,Gb - AND without storing the result; only EFLAGS are updated.
 * Delegates to the generic byte rm,r8 binary-operator worker.
 */
FNIEMOP_DEF(iemOp_test_Eb_Gb)
{
    IEMOP_MNEMONIC("test Eb,Gb");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo do we have to decode the whole instruction first? */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);    /* AF is undefined after TEST */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_test);
}
9654
9655
/**
 * @opcode 0x85
 * TEST Ev,Gv - AND without storing the result; only EFLAGS are updated.
 * Delegates to the generic word/dword/qword rm,rv binary-operator worker.
 */
FNIEMOP_DEF(iemOp_test_Ev_Gv)
{
    IEMOP_MNEMONIC("test Ev,Gv");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo do we have to decode the whole instruction first? */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);    /* AF is undefined after TEST */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_test);
}
9664
9665
/**
 * @opcode 0x86
 * XCHG Eb,Gb - exchange a byte register with a byte register or memory.
 *
 * The register-register form swaps via two temporaries; the memory form maps
 * the destination read-write and calls the assembly xchg worker (memory forms
 * of XCHG are implicitly locked on real hardware).
 */
FNIEMOP_DEF(iemOp_xchg_Eb_Gb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC("xchg Eb,Gb");

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, uTmp1);
        IEM_MC_LOCAL(uint8_t, uTmp2);

        /* Classic two-temporary swap of the reg and r/m registers. */
        IEM_MC_FETCH_GREG_U8(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_FETCH_GREG_U8(uTmp2, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, uTmp1);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uTmp2);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
/** @todo the register must be committed separately! */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint8_t *, pu8Mem, 0);
        IEM_MC_ARG(uint8_t *, pu8Reg, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Mem, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8, pu8Mem, pu8Reg);
        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Mem, IEM_ACCESS_DATA_RW);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
9713
9714
/**
 * @opcode 0x87
 * XCHG Ev,Gv - exchange a word/dword/qword register with a register or memory.
 *
 * Register-register forms swap via two temporaries.  Memory forms map the
 * destination read-write and call the assembly xchg worker.  In the 32-bit
 * memory form the upper half of the 64-bit register is explicitly cleared
 * afterwards; the register forms rely on the 32-bit store doing that.
 */
FNIEMOP_DEF(iemOp_xchg_Ev_Gv)
{
    IEMOP_MNEMONIC("xchg Ev,Gv");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX();

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, uTmp1);
                IEM_MC_LOCAL(uint16_t, uTmp2);

                IEM_MC_FETCH_GREG_U16(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U16(uTmp2, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, uTmp1);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, uTmp1);
                IEM_MC_LOCAL(uint32_t, uTmp2);

                IEM_MC_FETCH_GREG_U32(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U32(uTmp2, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, uTmp1);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, uTmp1);
                IEM_MC_LOCAL(uint64_t, uTmp2);

                IEM_MC_FETCH_GREG_U64(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U64(uTmp2, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, uTmp1);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
        switch (pIemCpu->enmEffOpSize)
        {
/** @todo the register must be committed separately! */
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint16_t *, pu16Mem, 0);
                IEM_MC_ARG(uint16_t *, pu16Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu16Mem, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16, pu16Mem, pu16Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Mem, IEM_ACCESS_DATA_RW);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint32_t *, pu32Mem, 0);
                IEM_MC_ARG(uint32_t *, pu32Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu32Mem, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32, pu32Mem, pu32Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Mem, IEM_ACCESS_DATA_RW);

                /* Worker wrote via pointer, so clear the high half explicitly. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint64_t *, pu64Mem, 0);
                IEM_MC_ARG(uint64_t *, pu64Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu64Mem, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64, pu64Mem, pu64Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Mem, IEM_ACCESS_DATA_RW);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9836
9837
/**
 * @opcode 0x88
 * MOV Eb,Gb - store a byte register into a byte register or memory.
 */
FNIEMOP_DEF(iemOp_mov_Eb_Gb)
{
    IEMOP_MNEMONIC("mov Eb,Gb");

    uint8_t bRm;
    IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register to register: fetch from reg, store into r/m. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_STORE_MEM_U8(pIemCpu->iEffSeg, GCPtrEffDst, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;

}
9876
9877
/**
 * @opcode 0x89
 * MOV Ev,Gv - store a word/dword/qword register into a register or memory.
 * One case per effective operand size, for both register and memory targets.
 */
FNIEMOP_DEF(iemOp_mov_Ev_Gv)
{
    IEMOP_MNEMONIC("mov Ev,Gv");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrEffDst, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffDst, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
9964
9965
/**
 * @opcode 0x8a
 * MOV Gb,Eb - load a byte register from a byte register or memory.
 */
FNIEMOP_DEF(iemOp_mov_Gb_Eb)
{
    IEMOP_MNEMONIC("mov Gb,Eb");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register to register: fetch from r/m, store into reg. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_MEM_U8(u8Value, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
10002
10003
/**
 * @opcode 0x8b
 * MOV Gv,Ev - load a word/dword/qword register from a register or memory.
 * One case per effective operand size, for both register and memory sources.
 */
FNIEMOP_DEF(iemOp_mov_Gv_Ev)
{
    IEMOP_MNEMONIC("mov Gv,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
10090
10091
10092/** Opcode 0x63. */
10093FNIEMOP_DEF(iemOp_arpl_Ew_Gw_movsx_Gv_Ev)
10094{
10095 if (pIemCpu->enmCpuMode != IEMMODE_64BIT)
10096 return FNIEMOP_CALL(iemOp_arpl_Ew_Gw);
10097 if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
10098 return FNIEMOP_CALL(iemOp_mov_Gv_Ev);
10099 return FNIEMOP_CALL(iemOp_movsxd_Gv_Ev);
10100}
10101
10102
/**
 * Opcode 0x8c - mov Ev,Sw.
 *
 * Stores a segment register into a general register (operand sized, zero
 * extended) or into a word in memory (always 16-bit, see below).
 */
FNIEMOP_DEF(iemOp_mov_Ev_Sw)
{
    IEMOP_MNEMONIC("mov Ev,Sw");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * Check that the destination register exists. The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
    if (   iSegReg > X86_SREG_GS)
        return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     * In that case, the operand size is respected and the upper bits are
     * cleared (starting with some pentium).
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_SREG_ZX_U32(u32Value, iSegReg); /* zero extended to the full 32 bits */
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_SREG_ZX_U64(u64Value, iSegReg); /* zero extended to the full 64 bits */
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're saving the register to memory.  The access is word sized
         * regardless of operand size prefixes.
         */
#if 0 /* not necessary */
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_16BIT;
#endif
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
        IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
10175
10176
10177
10178
/**
 * Opcode 0x8d - lea Gv,M.
 *
 * Stores the effective address of the memory operand in a general register;
 * no memory access takes place.  The register form is undefined (\#UD).
 */
FNIEMOP_DEF(iemOp_lea_Gv_M)
{
    IEMOP_MNEMONIC("lea Gv,M");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE(); /* no register form */

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint16_t, u16Cast);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEM_MC_ASSIGN_TO_SMALLER(u16Cast, GCPtrEffSrc); /* address truncated to the operand size */
            IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Cast);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Cast);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEM_MC_ASSIGN_TO_SMALLER(u32Cast, GCPtrEffSrc); /* address truncated to the operand size */
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Cast);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, GCPtrEffSrc);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
    }
    AssertFailedReturn(VERR_IEM_IPE_7);
}
10223
10224
/**
 * Opcode 0x8e - mov Sw,Ev.
 *
 * Loads a segment register from a general register or a word in memory.
 * The actual load is deferred to the iemCImpl_load_SReg C implementation.
 * CS cannot be loaded this way (\#UD).
 */
FNIEMOP_DEF(iemOp_mov_Sw_Ev)
{
    IEMOP_MNEMONIC("mov Sw,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * The practical operand size is 16-bit.
     */
#if 0 /* not necessary */
    pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_16BIT;
#endif

    /*
     * Check that the destination register exists and can be used with this
     * instruction.  The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
    if (   iSegReg == X86_SREG_CS
        || iSegReg > X86_SREG_GS)
        return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
        IEM_MC_ARG(uint16_t, u16Value, 1);
        IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading the register from memory.  The access is word sized
         * regardless of operand size prefixes.
         */
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
        IEM_MC_ARG(uint16_t, u16Value, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_MEM_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
10278
10279
/**
 * Opcode 0x8f /0 - pop Ev.
 *
 * @param   bRm     The ModR/M byte (already fetched by the Grp1A dispatcher).
 */
FNIEMOP_DEF_1(iemOp_pop_Ev, uint8_t, bRm)
{
    /* This bugger is rather annoying as it requires rSP to be updated before
       doing the effective address calculations.  Will eventually require a
       split between the R/M+SIB decoding and the effective address
       calculation - which is something that is required for any attempt at
       reusing this code for a recompiler.  It may also be good to have if we
       need to delay #UD exception caused by invalid lock prefixes.

       For now, we'll do a mostly safe interpreter-only implementation here. */
    /** @todo What's the deal with the 'reg' field and pop Ev?  Ignoring it for
     *        now until tests show it's checked.. */
    IEMOP_MNEMONIC("pop Ev");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /* Register access is relatively easy and can share code. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(iemOpCommonPopGReg, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);

    /*
     * Memory target.
     *
     * Intel says that RSP is incremented before it's used in any effective
     * address calcuations.  This means some serious extra annoyance here since
     * we decode and calculate the effective address in one step and like to
     * delay committing registers till everything is done.
     *
     * So, we'll decode and calculate the effective address twice.  This will
     * require some recoding if turned into a recompiler.
     */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* The common code does this differently. */

#ifndef TST_IEM_CHECK_MC
    /* Calc effective address with modified ESP. */
    /* First pass: rewind the opcode pointer afterwards so the second pass sees the same bytes. */
    uint8_t const   offOpcodeSaved = pIemCpu->offOpcode;
    RTGCPTR         GCPtrEff;
    VBOXSTRICTRC    rcStrict;
    rcStrict = iemOpHlpCalcRmEffAddr(pIemCpu, bRm, 0, &GCPtrEff);
    if (rcStrict != VINF_SUCCESS)
        return rcStrict;
    pIemCpu->offOpcode = offOpcodeSaved;

    /* Second pass with rSP temporarily bumped by the operand size, then restored. */
    PCPUMCTX        pCtx     = pIemCpu->CTX_SUFF(pCtx);
    uint64_t const  RspSaved = pCtx->rsp;
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT: iemRegAddToRsp(pIemCpu, pCtx, 2); break;
        case IEMMODE_32BIT: iemRegAddToRsp(pIemCpu, pCtx, 4); break;
        case IEMMODE_64BIT: iemRegAddToRsp(pIemCpu, pCtx, 8); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    rcStrict = iemOpHlpCalcRmEffAddr(pIemCpu, bRm, 0, &GCPtrEff);
    Assert(rcStrict == VINF_SUCCESS);
    pCtx->rsp = RspSaved;

    /* Perform the operation - this should be CImpl. */
    /* Pop into a temporary rSP copy so nothing is committed if the store faults. */
    RTUINT64U TmpRsp;
    TmpRsp.u = pCtx->rsp;
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Value;
            rcStrict = iemMemStackPopU16Ex(pIemCpu, &u16Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU16(pIemCpu, pIemCpu->iEffSeg, GCPtrEff, u16Value);
            break;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Value;
            rcStrict = iemMemStackPopU32Ex(pIemCpu, &u32Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU32(pIemCpu, pIemCpu->iEffSeg, GCPtrEff, u32Value);
            break;
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Value;
            rcStrict = iemMemStackPopU64Ex(pIemCpu, &u64Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU64(pIemCpu, pIemCpu->iEffSeg, GCPtrEff, u64Value);
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict == VINF_SUCCESS)
    {
        /* Commit rSP and advance RIP only on full success. */
        pCtx->rsp = TmpRsp.u;
        iemRegUpdateRipAndClearRF(pIemCpu);
    }
    return rcStrict;

#else
    return VERR_IEM_IPE_2;
#endif
}
10381
10382
10383/** Opcode 0x8f. */
10384FNIEMOP_DEF(iemOp_Grp1A)
10385{
10386 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10387 if ((bRm & X86_MODRM_REG_MASK) == (0 << X86_MODRM_REG_SHIFT)) /* /0 */
10388 return FNIEMOP_CALL_1(iemOp_pop_Ev, bRm);
10389
10390 /* AMD has defined /1 thru /7 as XOP prefix (similar to three byte VEX). */
10391 /** @todo XOP decoding. */
10392 IEMOP_MNEMONIC("3-byte-xop");
10393 return IEMOP_RAISE_INVALID_OPCODE();
10394}
10395
10396
10397/**
10398 * Common 'xchg reg,rAX' helper.
10399 */
10400FNIEMOP_DEF_1(iemOpCommonXchgGRegRax, uint8_t, iReg)
10401{
10402 IEMOP_HLP_NO_LOCK_PREFIX();
10403
10404 iReg |= pIemCpu->uRexB;
10405 switch (pIemCpu->enmEffOpSize)
10406 {
10407 case IEMMODE_16BIT:
10408 IEM_MC_BEGIN(0, 2);
10409 IEM_MC_LOCAL(uint16_t, u16Tmp1);
10410 IEM_MC_LOCAL(uint16_t, u16Tmp2);
10411 IEM_MC_FETCH_GREG_U16(u16Tmp1, iReg);
10412 IEM_MC_FETCH_GREG_U16(u16Tmp2, X86_GREG_xAX);
10413 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp1);
10414 IEM_MC_STORE_GREG_U16(iReg, u16Tmp2);
10415 IEM_MC_ADVANCE_RIP();
10416 IEM_MC_END();
10417 return VINF_SUCCESS;
10418
10419 case IEMMODE_32BIT:
10420 IEM_MC_BEGIN(0, 2);
10421 IEM_MC_LOCAL(uint32_t, u32Tmp1);
10422 IEM_MC_LOCAL(uint32_t, u32Tmp2);
10423 IEM_MC_FETCH_GREG_U32(u32Tmp1, iReg);
10424 IEM_MC_FETCH_GREG_U32(u32Tmp2, X86_GREG_xAX);
10425 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp1);
10426 IEM_MC_STORE_GREG_U32(iReg, u32Tmp2);
10427 IEM_MC_ADVANCE_RIP();
10428 IEM_MC_END();
10429 return VINF_SUCCESS;
10430
10431 case IEMMODE_64BIT:
10432 IEM_MC_BEGIN(0, 2);
10433 IEM_MC_LOCAL(uint64_t, u64Tmp1);
10434 IEM_MC_LOCAL(uint64_t, u64Tmp2);
10435 IEM_MC_FETCH_GREG_U64(u64Tmp1, iReg);
10436 IEM_MC_FETCH_GREG_U64(u64Tmp2, X86_GREG_xAX);
10437 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp1);
10438 IEM_MC_STORE_GREG_U64(iReg, u64Tmp2);
10439 IEM_MC_ADVANCE_RIP();
10440 IEM_MC_END();
10441 return VINF_SUCCESS;
10442
10443 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10444 }
10445}
10446
10447
10448/** Opcode 0x90. */
10449FNIEMOP_DEF(iemOp_nop)
10450{
10451 /* R8/R8D and RAX/EAX can be exchanged. */
10452 if (pIemCpu->fPrefixes & IEM_OP_PRF_REX_B)
10453 {
10454 IEMOP_MNEMONIC("xchg r8,rAX");
10455 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xAX);
10456 }
10457
10458 if (pIemCpu->fPrefixes & IEM_OP_PRF_LOCK)
10459 IEMOP_MNEMONIC("pause");
10460 else
10461 IEMOP_MNEMONIC("nop");
10462 IEM_MC_BEGIN(0, 0);
10463 IEM_MC_ADVANCE_RIP();
10464 IEM_MC_END();
10465 return VINF_SUCCESS;
10466}
10467
10468
/** Opcode 0x91 - xchg rCX,rAX; width and REX.B handled by the common worker. */
FNIEMOP_DEF(iemOp_xchg_eCX_eAX)
{
    IEMOP_MNEMONIC("xchg rCX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xCX);
}
10475
10476
/** Opcode 0x92 - xchg rDX,rAX; width and REX.B handled by the common worker. */
FNIEMOP_DEF(iemOp_xchg_eDX_eAX)
{
    IEMOP_MNEMONIC("xchg rDX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDX);
}
10483
10484
/** Opcode 0x93 - xchg rBX,rAX; width and REX.B handled by the common worker. */
FNIEMOP_DEF(iemOp_xchg_eBX_eAX)
{
    IEMOP_MNEMONIC("xchg rBX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBX);
}
10491
10492
10493/** Opcode 0x94. */
10494FNIEMOP_DEF(iemOp_xchg_eSP_eAX)
10495{
10496 IEMOP_MNEMONIC("xchg rSX,rAX");
10497 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSP);
10498}
10499
10500
/** Opcode 0x95 - xchg rBP,rAX; width and REX.B handled by the common worker. */
FNIEMOP_DEF(iemOp_xchg_eBP_eAX)
{
    IEMOP_MNEMONIC("xchg rBP,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBP);
}
10507
10508
/** Opcode 0x96 - xchg rSI,rAX; width and REX.B handled by the common worker. */
FNIEMOP_DEF(iemOp_xchg_eSI_eAX)
{
    IEMOP_MNEMONIC("xchg rSI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSI);
}
10515
10516
/** Opcode 0x97 - xchg rDI,rAX; width and REX.B handled by the common worker. */
FNIEMOP_DEF(iemOp_xchg_eDI_eAX)
{
    IEMOP_MNEMONIC("xchg rDI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDI);
}
10523
10524
/**
 * Opcode 0x98 - cbw / cwde / cdqe.
 *
 * Sign extends AL into AX, AX into EAX or EAX into RAX depending on the
 * effective operand size.  Implemented by testing the source sign bit and
 * either OR-ing in or AND-ing off the upper half of xAX.
 */
FNIEMOP_DEF(iemOp_cbw)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEMOP_MNEMONIC("cbw");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 7) { /* sign bit of AL */
                IEM_MC_OR_GREG_U16(X86_GREG_xAX, UINT16_C(0xff00));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U16(X86_GREG_xAX, UINT16_C(0x00ff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEMOP_MNEMONIC("cwde");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) { /* sign bit of AX */
                IEM_MC_OR_GREG_U32(X86_GREG_xAX, UINT32_C(0xffff0000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U32(X86_GREG_xAX, UINT32_C(0x0000ffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEMOP_MNEMONIC("cdqe");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) { /* sign bit of EAX */
                IEM_MC_OR_GREG_U64(X86_GREG_xAX, UINT64_C(0xffffffff00000000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U64(X86_GREG_xAX, UINT64_C(0x00000000ffffffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10570
10571
/**
 * Opcode 0x99 - cwd / cdq / cqo.
 *
 * Fills DX, EDX or RDX with the sign of AX, EAX or RAX respectively,
 * depending on the effective operand size.
 */
FNIEMOP_DEF(iemOp_cwd)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEMOP_MNEMONIC("cwd");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) { /* sign bit of AX */
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, UINT16_C(0xffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEMOP_MNEMONIC("cdq");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) { /* sign bit of EAX */
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, UINT32_C(0xffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEMOP_MNEMONIC("cqo");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 63) { /* sign bit of RAX */
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, UINT64_C(0xffffffffffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10617
10618
/**
 * Opcode 0x9a - call Ap.
 *
 * Far call with the selector:offset pointer taken directly from the
 * instruction stream; invalid in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_call_Ap)
{
    IEMOP_MNEMONIC("call Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    uint32_t offSeg;
    if (pIemCpu->enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&offSeg); /* 32-bit offset for a 32-bit operand size */
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg); /* 16-bit offset, zero extended */
    uint16_t uSel; IEM_OPCODE_GET_NEXT_U16(&uSel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_callf, uSel, offSeg, pIemCpu->enmEffOpSize);
}
10635
10636
/**
 * Opcode 0x9b - wait (aka fwait).
 *
 * Only raises pending device-not-available / FPU exceptions; otherwise it
 * merely advances RIP.
 */
FNIEMOP_DEF(iemOp_wait)
{
    IEMOP_MNEMONIC("wait");
    IEMOP_HLP_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10650
10651
/** Opcode 0x9c - pushf; deferred to the C implementation (operand size
 *  defaults to 64-bit in long mode). */
FNIEMOP_DEF(iemOp_pushf_Fv)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_pushf, pIemCpu->enmEffOpSize);
}
10659
10660
/** Opcode 0x9d - popf; deferred to the C implementation (operand size
 *  defaults to 64-bit in long mode). */
FNIEMOP_DEF(iemOp_popf_Fv)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_popf, pIemCpu->enmEffOpSize);
}
10668
10669
/**
 * Opcode 0x9e - sahf.
 *
 * Stores AH into the low byte of EFLAGS (SF, ZF, AF, PF, CF), forcing the
 * reserved bit 1 to be set.  In 64-bit mode the instruction is only valid
 * when the CPU advertises the LAHF/SAHF capability.
 */
FNIEMOP_DEF(iemOp_sahf)
{
    IEMOP_MNEMONIC("sahf");
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (   pIemCpu->enmCpuMode == IEMMODE_64BIT
        && !IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fLahfSahf)
        return IEMOP_RAISE_INVALID_OPCODE();
    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint32_t, u32Flags);
    IEM_MC_LOCAL(uint32_t, EFlags);
    IEM_MC_FETCH_EFLAGS(EFlags);
    IEM_MC_FETCH_GREG_U8_ZX_U32(u32Flags, X86_GREG_xSP/*=AH*/);
    IEM_MC_AND_LOCAL_U32(u32Flags, X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF); /* only these bits come from AH */
    IEM_MC_AND_LOCAL_U32(EFlags, UINT32_C(0xffffff00)); /* keep the upper flag bits */
    IEM_MC_OR_LOCAL_U32(u32Flags, X86_EFL_1); /* bit 1 is always set */
    IEM_MC_OR_2LOCS_U32(EFlags, u32Flags);
    IEM_MC_COMMIT_EFLAGS(EFlags);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10692
10693
/**
 * Opcode 0x9f - lahf.
 *
 * Loads the low byte of EFLAGS into AH.  In 64-bit mode the instruction is
 * only valid when the CPU advertises the LAHF/SAHF capability.
 */
FNIEMOP_DEF(iemOp_lahf)
{
    IEMOP_MNEMONIC("lahf");
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (   pIemCpu->enmCpuMode == IEMMODE_64BIT
        && !IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fLahfSahf)
        return IEMOP_RAISE_INVALID_OPCODE();
    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(uint8_t, u8Flags);
    IEM_MC_FETCH_EFLAGS_U8(u8Flags);
    IEM_MC_STORE_GREG_U8(X86_GREG_xSP/*=AH*/, u8Flags);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10710
10711
10712/**
10713 * Macro used by iemOp_mov_Al_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL and
10714 * iemOp_mov_Ov_rAX to fetch the moffsXX bit of the opcode and fend of lock
10715 * prefixes. Will return on failures.
10716 * @param a_GCPtrMemOff The variable to store the offset in.
10717 */
10718#define IEMOP_FETCH_MOFFS_XX(a_GCPtrMemOff) \
10719 do \
10720 { \
10721 switch (pIemCpu->enmEffAddrMode) \
10722 { \
10723 case IEMMODE_16BIT: \
10724 IEM_OPCODE_GET_NEXT_U16_ZX_U64(&(a_GCPtrMemOff)); \
10725 break; \
10726 case IEMMODE_32BIT: \
10727 IEM_OPCODE_GET_NEXT_U32_ZX_U64(&(a_GCPtrMemOff)); \
10728 break; \
10729 case IEMMODE_64BIT: \
10730 IEM_OPCODE_GET_NEXT_U64(&(a_GCPtrMemOff)); \
10731 break; \
10732 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
10733 } \
10734 IEMOP_HLP_NO_LOCK_PREFIX(); \
10735 } while (0)
10736
10737/** Opcode 0xa0. */
10738FNIEMOP_DEF(iemOp_mov_Al_Ob)
10739{
10740 /*
10741 * Get the offset and fend of lock prefixes.
10742 */
10743 RTGCPTR GCPtrMemOff;
10744 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
10745
10746 /*
10747 * Fetch AL.
10748 */
10749 IEM_MC_BEGIN(0,1);
10750 IEM_MC_LOCAL(uint8_t, u8Tmp);
10751 IEM_MC_FETCH_MEM_U8(u8Tmp, pIemCpu->iEffSeg, GCPtrMemOff);
10752 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
10753 IEM_MC_ADVANCE_RIP();
10754 IEM_MC_END();
10755 return VINF_SUCCESS;
10756}
10757
10758
/**
 * Opcode 0xa1 - mov rAX,Ov.
 *
 * Loads AX/EAX/RAX (per operand size) from the memory location given by the
 * immediate moffs operand.
 */
FNIEMOP_DEF(iemOp_mov_rAX_Ov)
{
    /*
     * Get the offset and fend of lock prefixes.
     */
    IEMOP_MNEMONIC("mov rAX,Ov");
    RTGCPTR GCPtrMemOff;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);

    /*
     * Fetch rAX.
     */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint16_t, u16Tmp);
            IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10804
10805
10806/** Opcode 0xa2. */
10807FNIEMOP_DEF(iemOp_mov_Ob_AL)
10808{
10809 /*
10810 * Get the offset and fend of lock prefixes.
10811 */
10812 RTGCPTR GCPtrMemOff;
10813 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
10814
10815 /*
10816 * Store AL.
10817 */
10818 IEM_MC_BEGIN(0,1);
10819 IEM_MC_LOCAL(uint8_t, u8Tmp);
10820 IEM_MC_FETCH_GREG_U8(u8Tmp, X86_GREG_xAX);
10821 IEM_MC_STORE_MEM_U8(pIemCpu->iEffSeg, GCPtrMemOff, u8Tmp);
10822 IEM_MC_ADVANCE_RIP();
10823 IEM_MC_END();
10824 return VINF_SUCCESS;
10825}
10826
10827
10828/** Opcode 0xa3. */
10829FNIEMOP_DEF(iemOp_mov_Ov_rAX)
10830{
10831 /*
10832 * Get the offset and fend of lock prefixes.
10833 */
10834 RTGCPTR GCPtrMemOff;
10835 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
10836
10837 /*
10838 * Store rAX.
10839 */
10840 switch (pIemCpu->enmEffOpSize)
10841 {
10842 case IEMMODE_16BIT:
10843 IEM_MC_BEGIN(0,1);
10844 IEM_MC_LOCAL(uint16_t, u16Tmp);
10845 IEM_MC_FETCH_GREG_U16(u16Tmp, X86_GREG_xAX);
10846 IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrMemOff, u16Tmp);
10847 IEM_MC_ADVANCE_RIP();
10848 IEM_MC_END();
10849 return VINF_SUCCESS;
10850
10851 case IEMMODE_32BIT:
10852 IEM_MC_BEGIN(0,1);
10853 IEM_MC_LOCAL(uint32_t, u32Tmp);
10854 IEM_MC_FETCH_GREG_U32(u32Tmp, X86_GREG_xAX);
10855 IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrMemOff, u32Tmp);
10856 IEM_MC_ADVANCE_RIP();
10857 IEM_MC_END();
10858 return VINF_SUCCESS;
10859
10860 case IEMMODE_64BIT:
10861 IEM_MC_BEGIN(0,1);
10862 IEM_MC_LOCAL(uint64_t, u64Tmp);
10863 IEM_MC_FETCH_GREG_U64(u64Tmp, X86_GREG_xAX);
10864 IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrMemOff, u64Tmp);
10865 IEM_MC_ADVANCE_RIP();
10866 IEM_MC_END();
10867 return VINF_SUCCESS;
10868
10869 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10870 }
10871}
10872
/** Macro used by iemOp_movsb_Xb_Yb and iemOp_movswd_Xv_Yv implementing a
 *  single (non-rep) MOVS step: load ValBits from [seg:xSI], store to
 *  [ES:xDI], then advance or retreat both index registers by ValBits/8
 *  according to EFLAGS.DF.  AddrBits selects the address-size of xSI/xDI. */
#define IEM_MOVS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, pIemCpu->iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END();
10891
/**
 * Opcode 0xa4 - movsb Xb,Yb.
 *
 * A single step is handled inline via IEM_MOVS_CASE; with a REP/REPNE prefix
 * the whole string operation is deferred to the C implementations.
 */
FNIEMOP_DEF(iemOp_movsb_Xb_Yb)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep movsb Xb,Yb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr16, pIemCpu->iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr32, pIemCpu->iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr64, pIemCpu->iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("movsb Xb,Yb");

    /*
     * Sharing case implementation with movs[wdq] below.
     */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_MOVS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_MOVS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_MOVS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
10925
10926
/**
 * Opcode 0xa5 - movsw/movsd/movsq Xv,Yv.
 *
 * Dispatches on both the effective operand size and address size; with a
 * REP/REPNE prefix the whole string operation is deferred to the C
 * implementations.
 */
FNIEMOP_DEF(iemOp_movswd_Xv_Yv)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep movs Xv,Yv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no break needed: every inner case above returns */
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /* 64-bit op with 16-bit addressing cannot be encoded */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("movs Xv,Yv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with movsb.
     */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_MOVS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_MOVS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
11009
11010#undef IEM_MOVS_CASE
11011
/** Macro used by iemOp_cmpsb_Xb_Yb and iemOp_cmpswd_Xv_Yv implementing a
 *  single (non-rep) CMPS step: compare ValBits at [seg:xSI] against
 *  [ES:xDI] via the cmp assembly worker (sets EFLAGS only), then advance or
 *  retreat both index registers by ValBits/8 according to EFLAGS.DF.
 *  AddrBits selects the address-size of xSI/xDI. */
#define IEM_CMPS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(3, 3); \
        IEM_MC_ARG(uint##ValBits##_t *, puValue1, 0); \
        IEM_MC_ARG(uint##ValBits##_t, uValue2, 1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue1); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue1, pIemCpu->iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue2, X86_SREG_ES, uAddr); \
        IEM_MC_REF_LOCAL(puValue1, uValue1); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puValue1, uValue2, pEFlags); \
        \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END(); \
11039/** Opcode 0xa6. */
11040FNIEMOP_DEF(iemOp_cmpsb_Xb_Yb)
11041{
11042 IEMOP_HLP_NO_LOCK_PREFIX();
11043
11044 /*
11045 * Use the C implementation if a repeat prefix is encountered.
11046 */
11047 if (pIemCpu->fPrefixes & IEM_OP_PRF_REPZ)
11048 {
11049 IEMOP_MNEMONIC("repe cmps Xb,Yb");
11050 switch (pIemCpu->enmEffAddrMode)
11051 {
11052 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr16, pIemCpu->iEffSeg);
11053 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr32, pIemCpu->iEffSeg);
11054 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr64, pIemCpu->iEffSeg);
11055 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11056 }
11057 }
11058 if (pIemCpu->fPrefixes & IEM_OP_PRF_REPNZ)
11059 {
11060 IEMOP_MNEMONIC("repe cmps Xb,Yb");
11061 switch (pIemCpu->enmEffAddrMode)
11062 {
11063 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr16, pIemCpu->iEffSeg);
11064 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr32, pIemCpu->iEffSeg);
11065 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr64, pIemCpu->iEffSeg);
11066 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11067 }
11068 }
11069 IEMOP_MNEMONIC("cmps Xb,Yb");
11070
11071 /*
11072 * Sharing case implementation with cmps[wdq] below.
11073 */
11074 switch (pIemCpu->enmEffAddrMode)
11075 {
11076 case IEMMODE_16BIT: IEM_CMPS_CASE(8, 16); break;
11077 case IEMMODE_32BIT: IEM_CMPS_CASE(8, 32); break;
11078 case IEMMODE_64BIT: IEM_CMPS_CASE(8, 64); break;
11079 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11080 }
11081 return VINF_SUCCESS;
11082
11083}
11084
11085
/**
 * Opcode 0xa7 - CMPSW/CMPSD/CMPSQ: compare [rSI] with ES:[rDI] at the
 * effective operand size.
 *
 * REPE/REPNE forms are deferred to the C implementations; the non-repeated
 * forms share the IEM_CMPS_CASE microcode macro with cmpsb above.  The
 * op-size x addr-size combination op64/addr16 cannot be encoded and asserts.
 */
FNIEMOP_DEF(iemOp_cmpswd_Xv_Yv)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC("repe cmps Xv,Yv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* Note: every case above returns, so the missing break (fall into 64-bit) is unreachable. */
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_4);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    if (pIemCpu->fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC("repne cmps Xv,Yv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* Note: every case above returns, so the missing break (fall into 64-bit) is unreachable. */
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_2);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    IEMOP_MNEMONIC("cmps Xv,Yv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with cmpsb.
     */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_CMPS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_CMPS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;

}
11204
11205#undef IEM_CMPS_CASE
11206
/**
 * Opcode 0xa8 - TEST AL, imm8.
 * Delegates to the common AL,Ib binary-operator helper with the TEST
 * implementation table.  AF is architecturally undefined after TEST.
 */
FNIEMOP_DEF(iemOp_test_AL_Ib)
{
    IEMOP_MNEMONIC("test al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_test);
}
11214
11215
/**
 * Opcode 0xa9 - TEST rAX, imm (Iz: 16/32-bit immediate, sign-extended to 64
 * per the rAX,Iz helper).  AF is architecturally undefined after TEST.
 */
FNIEMOP_DEF(iemOp_test_eAX_Iz)
{
    IEMOP_MNEMONIC("test rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_test);
}
11223
11224
/**
 * Macro used by iemOp_stosb_Yb_AL and iemOp_stoswd_Yv_eAX.
 *
 * Emits the microcode for one non-repeated STOS iteration: store
 * AL/AX/EAX/RAX to ES:[rDI], then advance or retreat rDI by the element
 * size depending on EFLAGS.DF.  (Only block comments may appear inside the
 * body - line comments would swallow the continuation backslashes.)
 */
#define IEM_STOS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        IEM_MC_FETCH_GREG_U##ValBits(uValue, X86_GREG_xAX); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END(); \
11241/** Opcode 0xaa. */
11242FNIEMOP_DEF(iemOp_stosb_Yb_AL)
11243{
11244 IEMOP_HLP_NO_LOCK_PREFIX();
11245
11246 /*
11247 * Use the C implementation if a repeat prefix is encountered.
11248 */
11249 if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
11250 {
11251 IEMOP_MNEMONIC("rep stos Yb,al");
11252 switch (pIemCpu->enmEffAddrMode)
11253 {
11254 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m16);
11255 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m32);
11256 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m64);
11257 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11258 }
11259 }
11260 IEMOP_MNEMONIC("stos Yb,al");
11261
11262 /*
11263 * Sharing case implementation with stos[wdq] below.
11264 */
11265 switch (pIemCpu->enmEffAddrMode)
11266 {
11267 case IEMMODE_16BIT: IEM_STOS_CASE(8, 16); break;
11268 case IEMMODE_32BIT: IEM_STOS_CASE(8, 32); break;
11269 case IEMMODE_64BIT: IEM_STOS_CASE(8, 64); break;
11270 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11271 }
11272 return VINF_SUCCESS;
11273}
11274
11275
/**
 * Opcode 0xab - STOSW/STOSD/STOSQ: store AX/EAX/RAX to ES:[rDI] at the
 * effective operand size.
 *
 * REP forms are deferred to the C implementations; the op64/addr16
 * combination cannot be encoded and asserts.
 */
FNIEMOP_DEF(iemOp_stoswd_Yv_eAX)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep stos Yv,rAX");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* Note: every case above returns, so the missing break (fall into 64-bit) is unreachable. */
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_9);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("stos Yv,rAX");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with stosb.
     */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_STOS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_STOS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_STOS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_STOS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_STOS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_STOS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_STOS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_STOS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
11358
11359#undef IEM_STOS_CASE
11360
/**
 * Macro used by iemOp_lodsb_AL_Xb and iemOp_lodswd_eAX_Xv.
 *
 * Emits the microcode for one non-repeated LODS iteration: load
 * AL/AX/EAX/RAX from iEffSeg:[rSI] (default DS, overridable), then advance
 * or retreat rSI by the element size depending on EFLAGS.DF.
 */
#define IEM_LODS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, pIemCpu->iEffSeg, uAddr); \
        IEM_MC_STORE_GREG_U##ValBits(X86_GREG_xAX, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END();
11376
/**
 * Opcode 0xac - LODSB: load AL from iEffSeg:[rSI].
 * REP forms are deferred to the C implementation; the single-shot form uses
 * IEM_LODS_CASE, shared with lods[wdq].
 */
FNIEMOP_DEF(iemOp_lodsb_AL_Xb)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep lodsb al,Xb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m16, pIemCpu->iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m32, pIemCpu->iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m64, pIemCpu->iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("lodsb al,Xb");

    /*
     * Sharing case implementation with lods[wdq] below.
     */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_LODS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_LODS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_LODS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
11410
11411
/**
 * Opcode 0xad - LODSW/LODSD/LODSQ: load AX/EAX/RAX from iEffSeg:[rSI] at the
 * effective operand size.
 *
 * REP forms are deferred to the C implementations; the op64/addr16
 * combination cannot be encoded and asserts.
 */
FNIEMOP_DEF(iemOp_lodswd_eAX_Xv)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep lods rAX,Xv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* Note: every case above returns, so the missing break (fall into 64-bit) is unreachable. */
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_7);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("lods rAX,Xv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with lodsb.
     */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_LODS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_LODS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_LODS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_LODS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_LODS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
11494
11495#undef IEM_LODS_CASE
11496
/**
 * Macro used by iemOp_scasb_AL_Xb and iemOp_scaswd_eAX_Xv.
 *
 * Emits the microcode for one non-repeated SCAS iteration: compare
 * AL/AX/EAX/RAX against ES:[rDI] (via iemAImpl_cmp, which updates EFLAGS),
 * then advance or retreat rDI by the element size depending on EFLAGS.DF.
 */
#define IEM_SCAS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(3, 2); \
        IEM_MC_ARG(uint##ValBits##_t *, puRax, 0); \
        IEM_MC_ARG(uint##ValBits##_t, uValue, 1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, X86_SREG_ES, uAddr); \
        IEM_MC_REF_GREG_U##ValBits(puRax, X86_GREG_xAX); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puRax, uValue, pEFlags); \
        \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END();
11518
/**
 * Opcode 0xae - SCASB: compare AL with the byte at ES:[rDI].
 * REPE/REPNE forms are deferred to the C implementations; the single-shot
 * form uses IEM_SCAS_CASE, shared with scas[wdq].
 */
FNIEMOP_DEF(iemOp_scasb_AL_Xb)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC("repe scasb al,Xb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m16);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m32);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC("repne scasb al,Xb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m16);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m32);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("scasb al,Xb");

    /*
     * Sharing case implementation with scas[wdq] below.
     */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_SCAS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_SCAS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_SCAS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
11563
11564
/**
 * Opcode 0xaf - SCASW/SCASD/SCASQ: compare AX/EAX/RAX with ES:[rDI] at the
 * effective operand size.
 *
 * REPE/REPNE forms are deferred to the C implementations; the op64/addr16
 * combination asserts (see the todo below questioning that choice).
 */
FNIEMOP_DEF(iemOp_scaswd_eAX_Xv)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC("repe scas rAX,Xv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* Note: every case above returns, so the missing break (fall into 64-bit) is unreachable. */
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /** @todo It's this wrong, we can do 16-bit addressing in 64-bit mode, but not 32-bit. right? */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC("repne scas rAX,Xv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* Note: every case above returns, so the missing break (fall into 64-bit) is unreachable. */
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_5);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("scas rAX,Xv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with scasb.
     */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_SCAS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_SCAS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_SCAS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_SCAS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_SCAS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_SCAS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_SCAS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_SCAS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
11680
11681#undef IEM_SCAS_CASE
11682
11683/**
11684 * Common 'mov r8, imm8' helper.
11685 */
11686FNIEMOP_DEF_1(iemOpCommonMov_r8_Ib, uint8_t, iReg)
11687{
11688 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
11689 IEMOP_HLP_NO_LOCK_PREFIX();
11690
11691 IEM_MC_BEGIN(0, 1);
11692 IEM_MC_LOCAL_CONST(uint8_t, u8Value,/*=*/ u8Imm);
11693 IEM_MC_STORE_GREG_U8(iReg, u8Value);
11694 IEM_MC_ADVANCE_RIP();
11695 IEM_MC_END();
11696
11697 return VINF_SUCCESS;
11698}
11699
11700
/** Opcode 0xb0 - MOV AL,Ib (R8B with REX.B). */
FNIEMOP_DEF(iemOp_mov_AL_Ib)
{
    IEMOP_MNEMONIC("mov AL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xAX | pIemCpu->uRexB);
}
11707
11708
/** Opcode 0xb1 - MOV CL,Ib (R9B with REX.B).
 * @note Function name lacks the 'mov_' part used by iemOp_mov_AL_Ib; left
 *       unchanged since it is referenced from the opcode dispatch table. */
FNIEMOP_DEF(iemOp_CL_Ib)
{
    IEMOP_MNEMONIC("mov CL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xCX | pIemCpu->uRexB);
}
11715
11716
/** Opcode 0xb2 - MOV DL,Ib (R10B with REX.B). */
FNIEMOP_DEF(iemOp_DL_Ib)
{
    IEMOP_MNEMONIC("mov DL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDX | pIemCpu->uRexB);
}
11723
11724
/** Opcode 0xb3 - MOV BL,Ib (R11B with REX.B). */
FNIEMOP_DEF(iemOp_BL_Ib)
{
    IEMOP_MNEMONIC("mov BL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBX | pIemCpu->uRexB);
}
11731
11732
/** Opcode 0xb4 - MOV AH,Ib.
 * Register index 4 (xSP): AH without REX, SPL/R12B with REX - the high-byte
 * vs. low-byte resolution is presumably done in IEM_MC_STORE_GREG_U8. */
FNIEMOP_DEF(iemOp_mov_AH_Ib)
{
    IEMOP_MNEMONIC("mov AH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP | pIemCpu->uRexB);
}
11739
11740
/** Opcode 0xb5 - MOV CH,Ib (index 5: CH without REX, BPL/R13B with REX). */
FNIEMOP_DEF(iemOp_CH_Ib)
{
    IEMOP_MNEMONIC("mov CH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBP | pIemCpu->uRexB);
}
11747
11748
/** Opcode 0xb6 - MOV DH,Ib (index 6: DH without REX, SIL/R14B with REX). */
FNIEMOP_DEF(iemOp_DH_Ib)
{
    IEMOP_MNEMONIC("mov DH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSI | pIemCpu->uRexB);
}
11755
11756
/** Opcode 0xb7 - MOV BH,Ib (index 7: BH without REX, DIL/R15B with REX). */
FNIEMOP_DEF(iemOp_BH_Ib)
{
    IEMOP_MNEMONIC("mov BH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDI | pIemCpu->uRexB);
}
11763
11764
11765/**
11766 * Common 'mov regX,immX' helper.
11767 */
11768FNIEMOP_DEF_1(iemOpCommonMov_Rv_Iv, uint8_t, iReg)
11769{
11770 switch (pIemCpu->enmEffOpSize)
11771 {
11772 case IEMMODE_16BIT:
11773 {
11774 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
11775 IEMOP_HLP_NO_LOCK_PREFIX();
11776
11777 IEM_MC_BEGIN(0, 1);
11778 IEM_MC_LOCAL_CONST(uint16_t, u16Value,/*=*/ u16Imm);
11779 IEM_MC_STORE_GREG_U16(iReg, u16Value);
11780 IEM_MC_ADVANCE_RIP();
11781 IEM_MC_END();
11782 break;
11783 }
11784
11785 case IEMMODE_32BIT:
11786 {
11787 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
11788 IEMOP_HLP_NO_LOCK_PREFIX();
11789
11790 IEM_MC_BEGIN(0, 1);
11791 IEM_MC_LOCAL_CONST(uint32_t, u32Value,/*=*/ u32Imm);
11792 IEM_MC_STORE_GREG_U32(iReg, u32Value);
11793 IEM_MC_ADVANCE_RIP();
11794 IEM_MC_END();
11795 break;
11796 }
11797 case IEMMODE_64BIT:
11798 {
11799 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm); /* 64-bit immediate! */
11800 IEMOP_HLP_NO_LOCK_PREFIX();
11801
11802 IEM_MC_BEGIN(0, 1);
11803 IEM_MC_LOCAL_CONST(uint64_t, u64Value,/*=*/ u64Imm);
11804 IEM_MC_STORE_GREG_U64(iReg, u64Value);
11805 IEM_MC_ADVANCE_RIP();
11806 IEM_MC_END();
11807 break;
11808 }
11809 }
11810
11811 return VINF_SUCCESS;
11812}
11813
11814
11815/** Opcode 0xb8. */
11816FNIEMOP_DEF(iemOp_eAX_Iv)
11817{
11818 IEMOP_MNEMONIC("mov rAX,IV");
11819 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xAX | pIemCpu->uRexB);
11820}
11821
11822
11823/** Opcode 0xb9. */
11824FNIEMOP_DEF(iemOp_eCX_Iv)
11825{
11826 IEMOP_MNEMONIC("mov rCX,IV");
11827 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xCX | pIemCpu->uRexB);
11828}
11829
11830
11831/** Opcode 0xba. */
11832FNIEMOP_DEF(iemOp_eDX_Iv)
11833{
11834 IEMOP_MNEMONIC("mov rDX,IV");
11835 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDX | pIemCpu->uRexB);
11836}
11837
11838
11839/** Opcode 0xbb. */
11840FNIEMOP_DEF(iemOp_eBX_Iv)
11841{
11842 IEMOP_MNEMONIC("mov rBX,IV");
11843 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBX | pIemCpu->uRexB);
11844}
11845
11846
11847/** Opcode 0xbc. */
11848FNIEMOP_DEF(iemOp_eSP_Iv)
11849{
11850 IEMOP_MNEMONIC("mov rSP,IV");
11851 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSP | pIemCpu->uRexB);
11852}
11853
11854
11855/** Opcode 0xbd. */
11856FNIEMOP_DEF(iemOp_eBP_Iv)
11857{
11858 IEMOP_MNEMONIC("mov rBP,IV");
11859 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBP | pIemCpu->uRexB);
11860}
11861
11862
11863/** Opcode 0xbe. */
11864FNIEMOP_DEF(iemOp_eSI_Iv)
11865{
11866 IEMOP_MNEMONIC("mov rSI,IV");
11867 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSI | pIemCpu->uRexB);
11868}
11869
11870
11871/** Opcode 0xbf. */
11872FNIEMOP_DEF(iemOp_eDI_Iv)
11873{
11874 IEMOP_MNEMONIC("mov rDI,IV");
11875 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDI | pIemCpu->uRexB);
11876}
11877
11878
/**
 * Opcode 0xc0 - Group 2: rotate/shift Eb by imm8 (ROL/ROR/RCL/RCR/SHL/SHR/SAR).
 *
 * Requires a 186 or later.  The ModR/M reg field selects the operation;
 * /6 is an invalid encoding.  OF and AF are treated as undefined for
 * verification purposes (their architectural definition varies with count).
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_Ib)
{
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Eb,Ib"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Eb,Ib"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Eb,Ib"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Eb,Ib"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Eb,Ib"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Eb,Ib"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Eb,Ib"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target: shift count imm8 follows the ModR/M byte directly */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEMOP_HLP_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target: imm8 comes after the displacement, hence the fetch
           after IEM_MC_CALC_RM_EFF_ADDR (1 = bytes of opcode still to come) */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, cShiftArg, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEM_MC_ASSIGN(cShiftArg, cShift);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
11938
11939
11940/** Opcode 0xc1. */
11941FNIEMOP_DEF(iemOp_Grp2_Ev_Ib)
11942{
11943 IEMOP_HLP_MIN_186();
11944 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11945 PCIEMOPSHIFTSIZES pImpl;
11946 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
11947 {
11948 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Ev,Ib"); break;
11949 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Ev,Ib"); break;
11950 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Ev,Ib"); break;
11951 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Ev,Ib"); break;
11952 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Ev,Ib"); break;
11953 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Ev,Ib"); break;
11954 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Ev,Ib"); break;
11955 case 6: return IEMOP_RAISE_INVALID_OPCODE();
11956 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
11957 }
11958 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
11959
11960 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
11961 {
11962 /* register */
11963 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
11964 IEMOP_HLP_NO_LOCK_PREFIX();
11965 switch (pIemCpu->enmEffOpSize)
11966 {
11967 case IEMMODE_16BIT:
11968 IEM_MC_BEGIN(3, 0);
11969 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
11970 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
11971 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11972 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
11973 IEM_MC_REF_EFLAGS(pEFlags);
11974 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
11975 IEM_MC_ADVANCE_RIP();
11976 IEM_MC_END();
11977 return VINF_SUCCESS;
11978
11979 case IEMMODE_32BIT:
11980 IEM_MC_BEGIN(3, 0);
11981 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
11982 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
11983 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11984 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
11985 IEM_MC_REF_EFLAGS(pEFlags);
11986 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
11987 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
11988 IEM_MC_ADVANCE_RIP();
11989 IEM_MC_END();
11990 return VINF_SUCCESS;
11991
11992 case IEMMODE_64BIT:
11993 IEM_MC_BEGIN(3, 0);
11994 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
11995 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
11996 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11997 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
11998 IEM_MC_REF_EFLAGS(pEFlags);
11999 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
12000 IEM_MC_ADVANCE_RIP();
12001 IEM_MC_END();
12002 return VINF_SUCCESS;
12003
12004 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12005 }
12006 }
12007 else
12008 {
12009 /* memory */
12010 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
12011 switch (pIemCpu->enmEffOpSize)
12012 {
12013 case IEMMODE_16BIT:
12014 IEM_MC_BEGIN(3, 2);
12015 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
12016 IEM_MC_ARG(uint8_t, cShiftArg, 1);
12017 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
12018 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12019
12020 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
12021 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
12022 IEM_MC_ASSIGN(cShiftArg, cShift);
12023 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
12024 IEM_MC_FETCH_EFLAGS(EFlags);
12025 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
12026
12027 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
12028 IEM_MC_COMMIT_EFLAGS(EFlags);
12029 IEM_MC_ADVANCE_RIP();
12030 IEM_MC_END();
12031 return VINF_SUCCESS;
12032
12033 case IEMMODE_32BIT:
12034 IEM_MC_BEGIN(3, 2);
12035 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
12036 IEM_MC_ARG(uint8_t, cShiftArg, 1);
12037 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
12038 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12039
12040 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
12041 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
12042 IEM_MC_ASSIGN(cShiftArg, cShift);
12043 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
12044 IEM_MC_FETCH_EFLAGS(EFlags);
12045 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
12046
12047 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
12048 IEM_MC_COMMIT_EFLAGS(EFlags);
12049 IEM_MC_ADVANCE_RIP();
12050 IEM_MC_END();
12051 return VINF_SUCCESS;
12052
12053 case IEMMODE_64BIT:
12054 IEM_MC_BEGIN(3, 2);
12055 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
12056 IEM_MC_ARG(uint8_t, cShiftArg, 1);
12057 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
12058 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12059
12060 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
12061 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
12062 IEM_MC_ASSIGN(cShiftArg, cShift);
12063 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
12064 IEM_MC_FETCH_EFLAGS(EFlags);
12065 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
12066
12067 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
12068 IEM_MC_COMMIT_EFLAGS(EFlags);
12069 IEM_MC_ADVANCE_RIP();
12070 IEM_MC_END();
12071 return VINF_SUCCESS;
12072
12073 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12074 }
12075 }
12076}
12077
12078
/**
 * Opcode 0xc2 - near return, popping Iw extra bytes off the stack.
 *
 * Fetches the 16-bit immediate (byte count to release after the return
 * address), then defers the actual stack/RIP work to iemCImpl_retn.
 * Note: in 64-bit mode the operand size defaults to 64-bit (no 32-bit form).
 */
FNIEMOP_DEF(iemOp_retn_Iw)
{
    IEMOP_MNEMONIC("retn Iw");
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); /* bytes to pop in addition to the return address */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pIemCpu->enmEffOpSize, u16Imm);
}
12088
12089
/**
 * Opcode 0xc3 - plain near return.
 *
 * Same as 0xc2 but with an implicit zero byte count; shares the
 * iemCImpl_retn worker.
 */
FNIEMOP_DEF(iemOp_retn)
{
    IEMOP_MNEMONIC("retn");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pIemCpu->enmEffOpSize, 0);
}
12098
12099
/**
 * Opcode 0xc4 - LES Gv,Mp, doubling as the 2-byte VEX prefix.
 *
 * In 64-bit mode, or with MOD=3 in legacy/compat mode, this encoding is the
 * VEX2 prefix; since VEX decoding is not implemented here yet it raises \#UD.
 * Otherwise it is the ordinary LES load of ES:Gv from a far pointer in memory.
 */
FNIEMOP_DEF(iemOp_les_Gv_Mp_vex2)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (   pIemCpu->enmCpuMode == IEMMODE_64BIT
        || (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC("2-byte-vex");
        /* The LES instruction is invalid 64-bit mode. In legacy and
           compatability mode it is invalid with MOD=3.
           The use as a VEX prefix is made possible by assigning the inverted
           REX.R to the top MOD bit, and the top bit in the inverted register
           specifier to the bottom MOD bit, thereby effectively limiting 32-bit
           to accessing registers 0..7 in this VEX form. */
        /** @todo VEX: Just use new tables for it. */
        return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_MNEMONIC("les Gv,Mp");
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_ES, bRm);
}
12120
12121
/**
 * Opcode 0xc5 - LDS Gv,Mp, doubling as the 3-byte VEX prefix.
 *
 * Outside 64-bit mode with a memory operand this is the ordinary LDS load of
 * DS:Gv.  The VEX interpretation (64-bit mode, or MOD=3 outside real/v86
 * mode) is not implemented yet: the two VEX payload bytes and the opcode
 * byte are consumed and \#UD is raised.
 */
FNIEMOP_DEF(iemOp_lds_Gv_Mp_vex3)
{
    /* The LDS instruction is invalid 64-bit mode. In legacy and
       compatability mode it is invalid with MOD=3.
       The use as a VEX prefix is made possible by assigning the inverted
       REX.R and REX.X to the two MOD bits, since the REX bits are ignored
       outside of 64-bit mode. VEX is not available in real or v86 mode. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pIemCpu->enmCpuMode != IEMMODE_64BIT)
    {
        if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        {
            IEMOP_MNEMONIC("lds Gv,Mp");
            return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_DS, bRm);
        }
        IEMOP_HLP_NO_REAL_OR_V86_MODE();
    }

    IEMOP_MNEMONIC("3-byte-vex");
    /** @todo Test when exctly the VEX conformance checks kick in during
     * instruction decoding and fetching (using \#PF). */
    uint8_t bVex1;   IEM_OPCODE_GET_NEXT_U8(&bVex1);   /* first VEX payload byte */
    uint8_t bVex2;   IEM_OPCODE_GET_NEXT_U8(&bVex2);   /* second VEX payload byte */
    uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode); /* the actual opcode following the prefix */
#if 0 /* will make sense of this next week... */
    /* NOTE(review): IEM_OP_PRF_REPZ appears twice in this dead mask; presumably
       one of them was meant to be a different prefix flag - confirm when enabling. */
    if (   !(pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REX))
        &&
        )
    {

    }
#endif

    /** @todo VEX: Just use new tables for it. */
    return IEMOP_RAISE_INVALID_OPCODE();
}
12159
12160
/**
 * Opcode 0xc6 - group 11: mov Eb,Ib.
 *
 * Only the /0 encoding (mov) is defined in this group; /1../7 raise \#UD.
 * Stores an immediate byte into a byte register or memory operand.
 */
FNIEMOP_DEF(iemOp_Grp11_Eb_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
        return IEMOP_RAISE_INVALID_OPCODE();
    IEMOP_MNEMONIC("mov Eb,Ib");

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_BEGIN(0, 0);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u8Imm);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = immediate byte still to come after the addressing bytes */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_STORE_MEM_U8(pIemCpu->iEffSeg, GCPtrEffDst, u8Imm);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
12192
12193
/**
 * Opcode 0xc7 - group 11: mov Ev,Iz.
 *
 * Only the /0 encoding (mov) is defined in this group; /1../7 raise \#UD.
 * Stores a 16/32-bit immediate (sign-extended 32-bit for 64-bit operand
 * size) into a register or memory operand.
 */
FNIEMOP_DEF(iemOp_Grp11_Ev_Iz)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Ev,Iz in this group. */
        return IEMOP_RAISE_INVALID_OPCODE();
    IEMOP_MNEMONIC("mov Ev,Iz");

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 0);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 0);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 0);
                /* 64-bit form takes a 32-bit immediate, sign-extended to 64 bits. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); /* 2 immediate bytes follow the addressing bytes */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* 4 immediate bytes follow */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrEffDst, u32Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* still only a 4-byte immediate in 64-bit mode */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffDst, u64Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12274
12275
12276
12277
/**
 * Opcode 0xc8 - ENTER Iw,Ib (make stack frame).
 *
 * 80186+ instruction.  Fetches the frame size (Iw) and nesting level (Ib)
 * and defers the frame construction to iemCImpl_enter.
 */
FNIEMOP_DEF(iemOp_enter_Iw_Ib)
{
    IEMOP_MNEMONIC("enter Iw,Ib");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEMOP_HLP_NO_LOCK_PREFIX();
    uint16_t cbFrame;        IEM_OPCODE_GET_NEXT_U16(&cbFrame);        /* bytes of local storage */
    uint8_t  u8NestingLevel; IEM_OPCODE_GET_NEXT_U8(&u8NestingLevel);  /* lexical nesting level (0..31 effective) - CIMPL handles masking */
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_enter, pIemCpu->enmEffOpSize, cbFrame, u8NestingLevel);
}
12289
12290
12291/** Opcode 0xc9. */
12292FNIEMOP_DEF(iemOp_leave)
12293{
12294 IEMOP_MNEMONIC("retn");
12295 IEMOP_HLP_MIN_186();
12296 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
12297 IEMOP_HLP_NO_LOCK_PREFIX();
12298 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_leave, pIemCpu->enmEffOpSize);
12299}
12300
12301
/**
 * Opcode 0xca - far return, popping Iw extra bytes off the stack.
 *
 * Fetches the 16-bit byte count and defers the CS:RIP pop (and possible
 * privilege-level change) to iemCImpl_retf.
 */
FNIEMOP_DEF(iemOp_retf_Iw)
{
    IEMOP_MNEMONIC("retf Iw");
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); /* bytes to pop after the far return address */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pIemCpu->enmEffOpSize, u16Imm);
}
12311
12312
/**
 * Opcode 0xcb - plain far return.
 *
 * Same as 0xca with an implicit zero byte count; shares iemCImpl_retf.
 */
FNIEMOP_DEF(iemOp_retf)
{
    IEMOP_MNEMONIC("retf");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pIemCpu->enmEffOpSize, 0);
}
12321
12322
12323/** Opcode 0xcc. */
12324FNIEMOP_DEF(iemOp_int_3)
12325{
12326 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12327 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_BP, true /*fIsBpInstr*/);
12328}
12329
12330
12331/** Opcode 0xcd. */
12332FNIEMOP_DEF(iemOp_int_Ib)
12333{
12334 uint8_t u8Int; IEM_OPCODE_GET_NEXT_U8(&u8Int);
12335 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12336 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, u8Int, false /*fIsBpInstr*/);
12337}
12338
12339
/**
 * Opcode 0xce - INTO, raise \#OF if the overflow flag is set.
 *
 * Invalid in 64-bit mode.  The conditional check on EFLAGS.OF happens
 * inside iemCImpl_int (hence the microcode CALL rather than a defer);
 * RIP advancing is handled by the CIMPL worker.
 */
FNIEMOP_DEF(iemOp_into)
{
    IEMOP_MNEMONIC("into");
    IEMOP_HLP_NO_64BIT();

    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG_CONST(uint8_t,   u8Int,      /*=*/ X86_XCPT_OF, 0);
    IEM_MC_ARG_CONST(bool,      fIsBpInstr, /*=*/ false, 1);
    IEM_MC_CALL_CIMPL_2(iemCImpl_int, u8Int, fIsBpInstr);
    IEM_MC_END();
    return VINF_SUCCESS;
}
12353
12354
/**
 * Opcode 0xcf - IRET/IRETD/IRETQ, interrupt return.
 *
 * All the mode-dependent complexity (real/protected/long mode, task
 * returns, privilege changes) lives in iemCImpl_iret.
 */
FNIEMOP_DEF(iemOp_iret)
{
    IEMOP_MNEMONIC("iret");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_iret, pIemCpu->enmEffOpSize);
}
12362
12363
/**
 * Opcode 0xd0 - group 2: rol/ror/rcl/rcr/shl/shr/sar Eb,1.
 *
 * Shift/rotate a byte register or memory operand by an implicit count of 1.
 * The /6 encoding is invalid (\#UD).  OF and AF are left undefined by the
 * verifier since real hardware behaviour varies for these flags.
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Eb,1"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Eb,1"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Eb,1"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Eb,1"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Eb,1"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Eb,1"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Eb,1"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
        IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=*/1,   1); /* implicit count of 1 */
        IEM_MC_ARG(uint32_t *,      pEFlags,            2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
        IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=*/1,   1);
        IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,    2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
12419
12420
12421
/**
 * Opcode 0xd1 - group 2: rol/ror/rcl/rcr/shl/shr/sar Ev,1.
 *
 * Shift/rotate a word/dword/qword register or memory operand by an implicit
 * count of 1.  The /6 encoding is invalid (\#UD).  OF and AF are left
 * undefined by the verifier.
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Ev,1"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Ev,1"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Ev,1"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Ev,1"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Ev,1"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Ev,1"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Ev,1"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_NO_LOCK_PREFIX();
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /* 32-bit writes zero the upper half in 64-bit mode */
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,      pu16Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,    2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,      pu32Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,    2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,      pu64Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,    2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12551
12552
/**
 * Opcode 0xd2 - group 2: rol/ror/rcl/rcr/shl/shr/sar Eb,CL.
 *
 * Shift/rotate a byte register or memory operand by the count in CL.
 * The /6 encoding is invalid (\#UD).  OF and AF are left undefined by the
 * verifier.
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Eb,CL"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Eb,CL"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Eb,CL"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Eb,CL"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Eb,CL"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Eb,CL"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Eb,CL"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc, grr. */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,   pu8Dst,     0);
        IEM_MC_ARG(uint8_t,     cShiftArg,  1);
        IEM_MC_ARG(uint32_t *,  pEFlags,    2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* shift count comes from CL */
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,   pu8Dst,          0);
        IEM_MC_ARG(uint8_t,     cShiftArg,       1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
12610
12611
/**
 * Opcode 0xd3 - group 2: rol/ror/rcl/rcr/shl/shr/sar Ev,CL.
 *
 * Shift/rotate a word/dword/qword register or memory operand by the count
 * in CL.  The /6 encoding is invalid (\#UD).  OF and AF are left undefined
 * by the verifier.
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Ev,CL"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Ev,CL"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Ev,CL"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Ev,CL"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Ev,CL"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Ev,CL"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Ev,CL"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_NO_LOCK_PREFIX();
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,  pu16Dst,    0);
                IEM_MC_ARG(uint8_t,     cShiftArg,  1);
                IEM_MC_ARG(uint32_t *,  pEFlags,    2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* shift count comes from CL */
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,  pu32Dst,    0);
                IEM_MC_ARG(uint8_t,     cShiftArg,  1);
                IEM_MC_ARG(uint32_t *,  pEFlags,    2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /* 32-bit writes zero the upper half in 64-bit mode */
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,  pu64Dst,    0);
                IEM_MC_ARG(uint8_t,     cShiftArg,  1);
                IEM_MC_ARG(uint32_t *,  pEFlags,    2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,  pu16Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,  pu32Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,  pu64Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12747
/**
 * Opcode 0xd4 - AAM Ib (ASCII adjust AX after multiply).
 *
 * Invalid in 64-bit mode.  A zero immediate (the divisor) raises \#DE at
 * decode time; otherwise the adjustment is done by iemCImpl_aam.
 */
FNIEMOP_DEF(iemOp_aam_Ib)
{
    IEMOP_MNEMONIC("aam Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); /* the divisor; normally 0x0a but any value is encodable */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    if (!bImm)
        return IEMOP_RAISE_DIVIDE_ERROR(); /* AAM with a zero divisor raises #DE */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aam, bImm);
}
12759
12760
/**
 * Opcode 0xd5 - AAD Ib (ASCII adjust AX before division).
 *
 * Invalid in 64-bit mode.  No \#DE case here (the immediate is a
 * multiplier, not a divisor); the work is done by iemCImpl_aad.
 */
FNIEMOP_DEF(iemOp_aad_Ib)
{
    IEMOP_MNEMONIC("aad Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); /* the multiplier; normally 0x0a */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aad, bImm);
}
12770
12771
12772/** Opcode 0xd6. */
12773FNIEMOP_DEF(iemOp_salc)
12774{
12775 IEMOP_MNEMONIC("salc");
12776 IEMOP_HLP_MIN_286(); /* (undocument at the time) */
12777 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12778 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12779 IEMOP_HLP_NO_64BIT();
12780
12781 IEM_MC_BEGIN(0, 0);
12782 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
12783 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0xff);
12784 } IEM_MC_ELSE() {
12785 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0x00);
12786 } IEM_MC_ENDIF();
12787 IEM_MC_ADVANCE_RIP();
12788 IEM_MC_END();
12789 return VINF_SUCCESS;
12790}
12791
12792
/**
 * Opcode 0xd7 - XLAT (table look-up translation).
 *
 * AL = [seg:(r/e)BX + zero-extended AL], with the address width selected by
 * the effective address mode.  The 16/32-bit fetch macros wrap the address
 * at the respective width, matching the legacy address-size behaviour.
 */
FNIEMOP_DEF(iemOp_xlat)
{
    IEMOP_MNEMONIC("xlat");
    IEMOP_HLP_NO_LOCK_PREFIX();
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint16_t, u16Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U16(u16Addr, X86_GREG_xAX); /* zero-extend AL into the index */
            IEM_MC_ADD_GREG_U16_TO_LOCAL(u16Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM16_U8(u8Tmp, pIemCpu->iEffSeg, u16Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint32_t, u32Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U32(u32Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U32_TO_LOCAL(u32Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM32_U8(u8Tmp, pIemCpu->iEffSeg, u32Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint64_t, u64Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U64(u64Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U64_TO_LOCAL(u64Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM_U8(u8Tmp, pIemCpu->iEffSeg, u64Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
12839
12840
12841/**
12842 * Common worker for FPU instructions working on ST0 and STn, and storing the
12843 * result in ST0.
12844 *
12845 * @param pfnAImpl Pointer to the instruction implementation (assembly).
12846 */
12847FNIEMOP_DEF_2(iemOpHlpFpu_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
12848{
12849 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12850
12851 IEM_MC_BEGIN(3, 1);
12852 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
12853 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
12854 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
12855 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
12856
12857 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
12858 IEM_MC_MAYBE_RAISE_FPU_XCPT();
12859 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
12860 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
12861 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
12862 IEM_MC_ELSE()
12863 IEM_MC_FPU_STACK_UNDERFLOW(0);
12864 IEM_MC_ENDIF();
12865 IEM_MC_USED_FPU();
12866 IEM_MC_ADVANCE_RIP();
12867
12868 IEM_MC_END();
12869 return VINF_SUCCESS;
12870}
12871
12872
12873/**
12874 * Common worker for FPU instructions working on ST0 and STn, and only affecting
12875 * flags.
12876 *
12877 * @param pfnAImpl Pointer to the instruction implementation (assembly).
12878 */
12879FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
12880{
12881 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12882
12883 IEM_MC_BEGIN(3, 1);
12884 IEM_MC_LOCAL(uint16_t, u16Fsw);
12885 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
12886 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
12887 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
12888
12889 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
12890 IEM_MC_MAYBE_RAISE_FPU_XCPT();
12891 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
12892 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
12893 IEM_MC_UPDATE_FSW(u16Fsw);
12894 IEM_MC_ELSE()
12895 IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
12896 IEM_MC_ENDIF();
12897 IEM_MC_USED_FPU();
12898 IEM_MC_ADVANCE_RIP();
12899
12900 IEM_MC_END();
12901 return VINF_SUCCESS;
12902}
12903
12904
12905/**
12906 * Common worker for FPU instructions working on ST0 and STn, only affecting
12907 * flags, and popping when done.
12908 *
12909 * @param pfnAImpl Pointer to the instruction implementation (assembly).
12910 */
12911FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
12912{
12913 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12914
12915 IEM_MC_BEGIN(3, 1);
12916 IEM_MC_LOCAL(uint16_t, u16Fsw);
12917 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
12918 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
12919 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
12920
12921 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
12922 IEM_MC_MAYBE_RAISE_FPU_XCPT();
12923 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
12924 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
12925 IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
12926 IEM_MC_ELSE()
12927 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(UINT8_MAX);
12928 IEM_MC_ENDIF();
12929 IEM_MC_USED_FPU();
12930 IEM_MC_ADVANCE_RIP();
12931
12932 IEM_MC_END();
12933 return VINF_SUCCESS;
12934}
12935
12936
/** Opcode 0xd8 11/0.  FADD ST(0),ST(i). */
FNIEMOP_DEF_1(iemOp_fadd_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fadd st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fadd_r80_by_r80);
}


/** Opcode 0xd8 11/1.  FMUL ST(0),ST(i). */
FNIEMOP_DEF_1(iemOp_fmul_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fmul st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fmul_r80_by_r80);
}


/** Opcode 0xd8 11/2.  FCOM ST(0),ST(i) - compare, only FSW is updated. */
FNIEMOP_DEF_1(iemOp_fcom_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fcom_r80_by_r80);
}


/** Opcode 0xd8 11/3.  FCOMP ST(0),ST(i) - compare and pop; same assembly
 *  worker as FCOM, only the pop differs. */
FNIEMOP_DEF_1(iemOp_fcomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fcom_r80_by_r80);
}


/** Opcode 0xd8 11/4.  FSUB ST(0),ST(i). */
FNIEMOP_DEF_1(iemOp_fsub_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsub st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsub_r80_by_r80);
}


/** Opcode 0xd8 11/5.  FSUBR ST(0),ST(i) - reversed operand order. */
FNIEMOP_DEF_1(iemOp_fsubr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsubr_r80_by_r80);
}


/** Opcode 0xd8 11/6.  FDIV ST(0),ST(i). */
FNIEMOP_DEF_1(iemOp_fdiv_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdiv st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdiv_r80_by_r80);
}


/** Opcode 0xd8 11/7.  FDIVR ST(0),ST(i) - reversed operand order. */
FNIEMOP_DEF_1(iemOp_fdivr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdivr_r80_by_r80);
}
12999
13000
/**
 * Common worker for FPU instructions working on ST0 and an m32r, and storing
 * the result in ST0.
 *
 * Fetches the 32-bit real operand from memory, runs the assembly worker on
 * ST(0) and the fetched value, and stores the result back in ST(0).  An empty
 * ST(0) takes the stack underflow path (QNaN into ST(0) per the worker's
 * masked-response handling).
 *
 * @param   bRm         The ModR/M byte; encodes the memory operand.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32r, uint8_t, bRm, PFNIEMAIMPLFPUR32, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr32Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13036
13037
/** Opcode 0xd8 !11/0.  FADD ST(0),m32real. */
FNIEMOP_DEF_1(iemOp_fadd_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fadd st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fadd_r80_by_r32);
}


/** Opcode 0xd8 !11/1.  FMUL ST(0),m32real. */
FNIEMOP_DEF_1(iemOp_fmul_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fmul st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fmul_r80_by_r32);
}
13052
13053
/** Opcode 0xd8 !11/2.  FCOM ST(0),m32real - compare against a memory operand,
 *  updating only FSW (open coded since the common workers either store a
 *  result or don't take a memory operand). */
FNIEMOP_DEF_1(iemOp_fcom_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcom st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* The _MEM_OP variants record the data pointer (FPUDP/FPUDS) as well. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13086
13087
/** Opcode 0xd8 !11/3.  FCOMP ST(0),m32real - like FCOM m32real but pops the
 *  stack afterwards (same assembly worker, _THEN_POP commits). */
FNIEMOP_DEF_1(iemOp_fcomp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcomp st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13120
13121
/** Opcode 0xd8 !11/4.  FSUB ST(0),m32real. */
FNIEMOP_DEF_1(iemOp_fsub_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsub st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsub_r80_by_r32);
}


/** Opcode 0xd8 !11/5.  FSUBR ST(0),m32real - reversed operand order. */
FNIEMOP_DEF_1(iemOp_fsubr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsubr_r80_by_r32);
}


/** Opcode 0xd8 !11/6.  FDIV ST(0),m32real. */
FNIEMOP_DEF_1(iemOp_fdiv_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdiv st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdiv_r80_by_r32);
}


/** Opcode 0xd8 !11/7.  FDIVR ST(0),m32real - reversed operand order. */
FNIEMOP_DEF_1(iemOp_fdivr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdivr_r80_by_r32);
}
13152
13153
/** Opcode 0xd8 - dispatches on the ModR/M byte.  Register forms (mod == 3)
 *  operate on ST(0),ST(i); memory forms take an m32real operand. */
FNIEMOP_DEF(iemOp_EscF0)
{
    /* offOpcode is already past the 0xd8 byte, so -1 gives the escape opcode
       offset (recorded for FOP/FPUIP related bookkeeping). */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_stN,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_stN,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_stN, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m32r,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m32r,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m32r,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m32r,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m32r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m32r,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m32r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
13191
13192
/** Opcode 0xd9 /0 mem32real - load a 32-bit real from memory, convert to r80
 * and push it onto the FPU stack.
 * @sa iemOp_fld_m64r */
FNIEMOP_DEF_1(iemOp_fld_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fld m32r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val, r32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* Register 7 is the one that becomes the new top after the push; if it is
       not free the push overflows. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r32_to_r80, pFpuRes, pr32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13225
13226
/** Opcode 0xd9 !11/2 mem32real - store ST(0) to memory as a 32-bit real. */
FNIEMOP_DEF_1(iemOp_fst_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fst m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing up front; the commit depends on the FSW
       the conversion worker returns (unmasked exceptions suppress it). */
    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST(0): with IM masked a negative QNaN is stored; either way
           the stack underflow is signalled. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13261
13262
/** Opcode 0xd9 !11/3 - FSTP m32real: like iemOp_fst_m32r but pops the stack
 *  after committing the store/FSW. */
FNIEMOP_DEF_1(iemOp_fstp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fstp m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13297
13298
/** Opcode 0xd9 !11/4 - FLDENV m14/28byte: load the FPU environment; the work
 *  is done by a C implementation (operand size decides 14 vs 28 bytes). */
FNIEMOP_DEF_1(iemOp_fldenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fldenv m14/28byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pIemCpu->enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fldenv, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
13315
13316
13317/** Opcode 0xd9 !11/5 */
13318FNIEMOP_DEF_1(iemOp_fldcw, uint8_t, bRm)
13319{
13320 IEMOP_MNEMONIC("fldcw m2byte");
13321 IEM_MC_BEGIN(1, 1);
13322 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13323 IEM_MC_ARG(uint16_t, u16Fsw, 0);
13324 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13325 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13326 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13327 IEM_MC_FETCH_MEM_U16(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
13328 IEM_MC_CALL_CIMPL_1(iemCImpl_fldcw, u16Fsw);
13329 IEM_MC_END();
13330 return VINF_SUCCESS;
13331}
13332
13333
13334/** Opcode 0xd9 !11/6 */
13335FNIEMOP_DEF_1(iemOp_fnstenv, uint8_t, bRm)
13336{
13337 IEMOP_MNEMONIC("fstenv m14/m28byte");
13338 IEM_MC_BEGIN(3, 0);
13339 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pIemCpu->enmEffOpSize, 0);
13340 IEM_MC_ARG(uint8_t, iEffSeg, 1);
13341 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
13342 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
13343 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13344 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13345 IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
13346 IEM_MC_CALL_CIMPL_3(iemCImpl_fnstenv, enmEffOpSize, iEffSeg, GCPtrEffDst);
13347 IEM_MC_END();
13348 return VINF_SUCCESS;
13349}
13350
13351
/** Opcode 0xd9 !11/7 - FNSTCW m2byte: store the FPU control word to memory
 *  (no-wait form). */
FNIEMOP_DEF_1(iemOp_fnstcw, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fnstcw m2byte");
    IEM_MC_BEGIN(2, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fcw);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_FETCH_FCW(u16Fcw);
    IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Fcw);
    IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
    return VINF_SUCCESS;
}
13368
13369
/** Opcode 0xd9 0xd0, 0xd9 0xd8-0xdf, ++?.  FNOP - does nothing except the
 *  usual FPU availability/exception checks and opcode/IP bookkeeping. */
FNIEMOP_DEF(iemOp_fnop)
{
    IEMOP_MNEMONIC("fnop");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /** @todo Testcase: looks like FNOP leaves FOP alone but updates FPUIP. Could be
     * intel optimizations. Investigate. */
    IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
    return VINF_SUCCESS;
}
13387
13388
/** Opcode 0xd9 11/0 stN - FLD ST(i): push a copy of ST(i) onto the stack. */
FNIEMOP_DEF_1(iemOp_fld_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fld stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     * indicates that it does. */
    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Copy the source register value into an FPU result and push it; an empty
       source means stack underflow on the push. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, bRm & X86_MODRM_RM_MASK)
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_PUSH_FPU_RESULT(FpuRes);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW();
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
13414
13415
/** Opcode 0xd9 11/3 stN - FXCH ST(i): exchange ST(0) and ST(i). */
FNIEMOP_DEF_1(iemOp_fxch_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fxch stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     * indicates that it does. */
    IEM_MC_BEGIN(1, 3);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value2);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_CONST(uint8_t, iStReg, /*=*/ bRm & X86_MODRM_RM_MASK, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Swap: old ST(i) goes into ST(0) via the FPU result (clearing C1 per the
       X86_FSW_C1 mask), old ST(0) is written straight into ST(i). */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        IEM_MC_SET_FPU_RESULT(FpuRes, X86_FSW_C1, pr80Value2);
        IEM_MC_STORE_FPUREG_R80_SRC_REF(bRm & X86_MODRM_RM_MASK, pr80Value1);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        /* One/both registers empty: the underflow handling is complicated
           enough to warrant a C implementation. */
        IEM_MC_CALL_CIMPL_1(iemCImpl_fxch_underflow, iStReg);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
13444
13445
/** Opcode 0xd9 11/4, 0xdd 11/2.  FSTP ST(i): copy ST(0) to ST(i) and pop. */
FNIEMOP_DEF_1(iemOp_fstp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fstp st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* fstp st0, st0 is frequently used as an official 'ffreep st0' sequence. */
    uint8_t const iDstReg = bRm & X86_MODRM_RM_MASK;
    if (!iDstReg)
    {
        /* Destination is ST(0) itself: no data movement needed, just clear the
           FSW flags and pop (or signal underflow if ST(0) is empty). */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL_CONST(uint16_t, u16Fsw, /*=*/ 0);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();
        IEM_MC_IF_FPUREG_NOT_EMPTY(0)
            IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
        IEM_MC_ELSE()
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(0);
        IEM_MC_ENDIF();
        IEM_MC_USED_FPU();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* General case: store ST(0) into ST(i), then pop. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
        IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();
        IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
            IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
            IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, iDstReg);
        IEM_MC_ELSE()
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(iDstReg);
        IEM_MC_ENDIF();
        IEM_MC_USED_FPU();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
13488
13489
/**
 * Common worker for FPU instructions working on ST0 and replaces it with the
 * result, i.e. unary operators.
 *
 * Runs the assembly worker on ST(0) and stores the result back in ST(0); an
 * empty ST(0) takes the stack underflow path.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpu_st0, PFNIEMAIMPLFPUR80UNARY, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(2, 1);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuRes, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13519
13520
/** Opcode 0xd9 0xe0.  FCHS - change the sign of ST(0). */
FNIEMOP_DEF(iemOp_fchs)
{
    IEMOP_MNEMONIC("fchs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fchs_r80);
}


/** Opcode 0xd9 0xe1.  FABS - absolute value of ST(0). */
FNIEMOP_DEF(iemOp_fabs)
{
    IEMOP_MNEMONIC("fabs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fabs_r80);
}
13535
13536
/**
 * Common worker for FPU instructions working on ST0 and only returns FSW.
 *
 * Runs the assembly worker on ST(0) and commits the returned FSW only; an
 * empty ST(0) takes the stack underflow path (UINT8_MAX = no destination
 * register).
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0, PFNIEMAIMPLFPUR80UNARYFSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(2, 1);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pu16Fsw, pr80Value);
        IEM_MC_UPDATE_FSW(u16Fsw);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13565
13566
/** Opcode 0xd9 0xe4.  FTST - compare ST(0) with 0.0, setting only FSW. */
FNIEMOP_DEF(iemOp_ftst)
{
    IEMOP_MNEMONIC("ftst st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_ftst_r80);
}


/** Opcode 0xd9 0xe5.  FXAM - classify the value in ST(0) via FSW C0-C3. */
FNIEMOP_DEF(iemOp_fxam)
{
    IEMOP_MNEMONIC("fxam st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_fxam_r80);
}
13581
13582
/**
 * Common worker for FPU instructions pushing a constant onto the FPU stack.
 *
 * The assembly worker produces the constant; it is pushed unless the register
 * that would become the new top (ST(7) relative to the current top) is in
 * use, in which case the push overflow path is taken.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuPushConstant, PFNIEMAIMPLFPUR80LDCONST, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(1, 1);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_1(pfnAImpl, pFpuRes);
        IEM_MC_PUSH_FPU_RESULT(FpuRes);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW();
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13610
13611
/** Opcode 0xd9 0xe8.  FLD1 - push +1.0. */
FNIEMOP_DEF(iemOp_fld1)
{
    IEMOP_MNEMONIC("fld1");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fld1);
}


/** Opcode 0xd9 0xe9.  FLDL2T - push log2(10). */
FNIEMOP_DEF(iemOp_fldl2t)
{
    IEMOP_MNEMONIC("fldl2t");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2t);
}


/** Opcode 0xd9 0xea.  FLDL2E - push log2(e). */
FNIEMOP_DEF(iemOp_fldl2e)
{
    IEMOP_MNEMONIC("fldl2e");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2e);
}

/** Opcode 0xd9 0xeb.  FLDPI - push pi. */
FNIEMOP_DEF(iemOp_fldpi)
{
    IEMOP_MNEMONIC("fldpi");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldpi);
}


/** Opcode 0xd9 0xec.  FLDLG2 - push log10(2). */
FNIEMOP_DEF(iemOp_fldlg2)
{
    IEMOP_MNEMONIC("fldlg2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldlg2);
}

/** Opcode 0xd9 0xed.  FLDLN2 - push loge(2). */
FNIEMOP_DEF(iemOp_fldln2)
{
    IEMOP_MNEMONIC("fldln2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldln2);
}


/** Opcode 0xd9 0xee.  FLDZ - push +0.0. */
FNIEMOP_DEF(iemOp_fldz)
{
    IEMOP_MNEMONIC("fldz");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldz);
}
13664
13665
/** Opcode 0xd9 0xf0.  F2XM1 - ST(0) := 2^ST(0) - 1. */
FNIEMOP_DEF(iemOp_f2xm1)
{
    IEMOP_MNEMONIC("f2xm1 st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_f2xm1_r80);
}
13672
13673
13674/** Opcode 0xd9 0xf1. */
13675FNIEMOP_DEF(iemOp_fylx2)
13676{
13677 IEMOP_MNEMONIC("fylx2 st0");
13678 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fyl2x_r80);
13679}
13680
13681
/**
 * Common worker for FPU instructions working on ST0 and having two outputs, one
 * replacing ST0 and one pushed onto the stack.
 *
 * The assembly worker returns a two-value result; both are committed by the
 * push macro.  An empty ST(0) takes the two-value push underflow path.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuReplace_st0_push, PFNIEMAIMPLFPUR80UNARYTWO, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(2, 1);
    IEM_MC_LOCAL(IEMFPURESULTTWO, FpuResTwo);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULTTWO, pFpuResTwo, FpuResTwo, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuResTwo, pr80Value);
        IEM_MC_PUSH_FPU_RESULT_TWO(FpuResTwo);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW_TWO();
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13711
13712
/** Opcode 0xd9 0xf2.  FPTAN - partial tangent of ST(0); replaces ST(0) and
 *  pushes a second result. */
FNIEMOP_DEF(iemOp_fptan)
{
    IEMOP_MNEMONIC("fptan st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fptan_r80_r80);
}
13719
13720
/**
 * Common worker for FPU instructions working on STn and ST0, storing the result
 * in STn, and popping the stack unless IE, DE or ZE was raised.
 *
 * Note the operand order: ST(bRm & 7) is the first operand/destination and
 * ST(0) the second, the reverse of the st0_stN workers.
 *
 * @param   bRm         The ModR/M byte (or the STn index directly for the
 *                      fixed-register callers); the r/m field selects STn.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, bRm & X86_MODRM_RM_MASK);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(bRm & X86_MODRM_RM_MASK);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13752
13753
/** Opcode 0xd9 0xf3.  FPATAN - ST(1) := partial arctan of ST(1)/ST(0), then
 *  pop (hence the stN_st0_pop worker with STn fixed to 1). */
FNIEMOP_DEF(iemOp_fpatan)
{
    IEMOP_MNEMONIC("fpatan st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fpatan_r80_by_r80);
}


/** Opcode 0xd9 0xf4.  FXTRACT - split ST(0) into exponent and significand;
 *  replaces ST(0) and pushes a second result. */
FNIEMOP_DEF(iemOp_fxtract)
{
    IEMOP_MNEMONIC("fxtract st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fxtract_r80_r80);
}


/** Opcode 0xd9 0xf5.  FPREM1 - IEEE partial remainder of ST(0)/ST(1), stored
 *  in ST(0). */
FNIEMOP_DEF(iemOp_fprem1)
{
    IEMOP_MNEMONIC("fprem1 st0, st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem1_r80_by_r80);
}
13776
13777
/** Opcode 0xd9 0xf6.  FDECSTP - decrement the FPU stack top pointer without
 *  touching register contents or tag bits. */
FNIEMOP_DEF(iemOp_fdecstp)
{
    IEMOP_MNEMONIC("fdecstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     * FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FPU_STACK_DEC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0);

    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
13800
13801
/** Opcode 0xd9 0xf7.  FINCSTP - increment the FPU stack top pointer without
 *  touching register contents or tag bits. */
FNIEMOP_DEF(iemOp_fincstp)
{
    IEMOP_MNEMONIC("fincstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     * FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0);

    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
13824
13825
/** Opcode 0xd9 0xf8.  FPREM - partial remainder of ST(0)/ST(1), stored in
 *  ST(0). */
FNIEMOP_DEF(iemOp_fprem)
{
    IEMOP_MNEMONIC("fprem st0, st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem_r80_by_r80);
}


/** Opcode 0xd9 0xf9.  FYL2XP1 - ST(1) := ST(1) * log2(ST(0)+1), then pop. */
FNIEMOP_DEF(iemOp_fyl2xp1)
{
    IEMOP_MNEMONIC("fyl2xp1 st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2xp1_r80_by_r80);
}


/** Opcode 0xd9 0xfa.  FSQRT - square root of ST(0). */
FNIEMOP_DEF(iemOp_fsqrt)
{
    IEMOP_MNEMONIC("fsqrt st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsqrt_r80);
}


/** Opcode 0xd9 0xfb.  FSINCOS - replaces ST(0) and pushes a second result. */
FNIEMOP_DEF(iemOp_fsincos)
{
    IEMOP_MNEMONIC("fsincos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fsincos_r80_r80);
}


/** Opcode 0xd9 0xfc.  FRNDINT - round ST(0) to integer. */
FNIEMOP_DEF(iemOp_frndint)
{
    IEMOP_MNEMONIC("frndint st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_frndint_r80);
}


/** Opcode 0xd9 0xfd.  FSCALE - scale ST(0) by ST(1). */
FNIEMOP_DEF(iemOp_fscale)
{
    IEMOP_MNEMONIC("fscale st0, st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fscale_r80_by_r80);
}


/** Opcode 0xd9 0xfe.  FSIN - sine of ST(0). */
FNIEMOP_DEF(iemOp_fsin)
{
    IEMOP_MNEMONIC("fsin st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsin_r80);
}


/** Opcode 0xd9 0xff.  FCOS - cosine of ST(0). */
FNIEMOP_DEF(iemOp_fcos)
{
    IEMOP_MNEMONIC("fcos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fcos_r80);
}
13888
13889
/** Used by iemOp_EscF1 to dispatch the 0xd9 register forms 0xe0 thru 0xff
 *  (ModR/M reg 4-7, mod 3); indexed by (bRm - 0xe0), invalid encodings map to
 *  iemOp_Invalid. */
static const PFNIEMOP g_apfnEscF1_E0toFF[32] =
{
    /* 0xe0 */  iemOp_fchs,
    /* 0xe1 */  iemOp_fabs,
    /* 0xe2 */  iemOp_Invalid,
    /* 0xe3 */  iemOp_Invalid,
    /* 0xe4 */  iemOp_ftst,
    /* 0xe5 */  iemOp_fxam,
    /* 0xe6 */  iemOp_Invalid,
    /* 0xe7 */  iemOp_Invalid,
    /* 0xe8 */  iemOp_fld1,
    /* 0xe9 */  iemOp_fldl2t,
    /* 0xea */  iemOp_fldl2e,
    /* 0xeb */  iemOp_fldpi,
    /* 0xec */  iemOp_fldlg2,
    /* 0xed */  iemOp_fldln2,
    /* 0xee */  iemOp_fldz,
    /* 0xef */  iemOp_Invalid,
    /* 0xf0 */  iemOp_f2xm1,
    /* 0xf1 */  iemOp_fylx2,
    /* 0xf2 */  iemOp_fptan,
    /* 0xf3 */  iemOp_fpatan,
    /* 0xf4 */  iemOp_fxtract,
    /* 0xf5 */  iemOp_fprem1,
    /* 0xf6 */  iemOp_fdecstp,
    /* 0xf7 */  iemOp_fincstp,
    /* 0xf8 */  iemOp_fprem,
    /* 0xf9 */  iemOp_fyl2xp1,
    /* 0xfa */  iemOp_fsqrt,
    /* 0xfb */  iemOp_fsincos,
    /* 0xfc */  iemOp_frndint,
    /* 0xfd */  iemOp_fscale,
    /* 0xfe */  iemOp_fsin,
    /* 0xff */  iemOp_fcos
};
13926
13927
/** Opcode 0xd9 - dispatches on the ModR/M byte; register forms for reg 4-7
 *  go through the g_apfnEscF1_E0toFF table. */
FNIEMOP_DEF(iemOp_EscF1)
{
    /* offOpcode is already past the 0xd9 byte, so -1 gives the escape opcode
       offset (recorded for FOP/FPUIP related bookkeeping). */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm);
            case 2:
                /* Only 0xd0 (FNOP) is defined in this row. */
                if (bRm == 0xd0)
                    return FNIEMOP_CALL(iemOp_fnop);
                return IEMOP_RAISE_INVALID_OPCODE();
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved. Intel behavior seems to be FSTP ST(i) though. */
            case 4:
            case 5:
            case 6:
            case 7:
                /* reg >= 4 with mod == 3 implies bRm is in 0xe0..0xff. */
                Assert((unsigned)bRm - 0xe0U < RT_ELEMENTS(g_apfnEscF1_E0toFF));
                return FNIEMOP_CALL(g_apfnEscF1_E0toFF[bRm - 0xe0]);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m32r,  bRm);
            case 1: return IEMOP_RAISE_INVALID_OPCODE();
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m32r,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fldenv,    bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fldcw,     bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fnstenv,   bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstcw,    bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
13969
13970
/** Opcode 0xda 11/0. FCMOVB - copy ST(i) to ST(0) if CF is set (below). */
FNIEMOP_DEF_1(iemOp_fcmovb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Both ST(0) and ST(i) must be non-empty, otherwise it's a stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13997
13998
/** Opcode 0xda 11/1. FCMOVE - copy ST(i) to ST(0) if ZF is set (equal). */
FNIEMOP_DEF_1(iemOp_fcmove_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmove st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Both ST(0) and ST(i) must be non-empty, otherwise it's a stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14025
14026
/** Opcode 0xda 11/2. FCMOVBE - copy ST(i) to ST(0) if CF or ZF is set (below or equal). */
FNIEMOP_DEF_1(iemOp_fcmovbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Both ST(0) and ST(i) must be non-empty, otherwise it's a stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14053
14054
/** Opcode 0xda 11/3. FCMOVU - copy ST(i) to ST(0) if PF is set (unordered). */
FNIEMOP_DEF_1(iemOp_fcmovu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovu st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Both ST(0) and ST(i) must be non-empty, otherwise it's a stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14081
14082
/**
 * Common worker for FPU instructions working on ST0 and STn, only affecting
 * flags, and popping twice when done.
 *
 * Used e.g. by FUCOMPP: compares ST(0) with ST(1), updates FSW condition
 * codes, then pops both operands.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(uint16_t,                  u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,        pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U,                pr80Value1,         1);
    IEM_MC_ARG(PCRTFLOAT80U,                pr80Value2,         2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* ST(0) and ST(1) must both be valid, else stack underflow (still pops twice). */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, 1)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW_THEN_POP_POP(u16Fsw);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP_POP();
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14113
14114
/** Opcode 0xda 0xe9. FUCOMPP - unordered compare ST(0) with ST(1), pop twice. */
FNIEMOP_DEF(iemOp_fucompp)
{
    IEMOP_MNEMONIC("fucompp st0,stN");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fucom_r80_by_r80);
}
14121
14122
/**
 * Common worker for FPU instructions working on ST0 and an m32i, and storing
 * the result in ST0.
 *
 * Fetches a signed 32-bit integer memory operand, applies the operation to
 * ST(0) and the integer, and stores the result back into ST(0).
 *
 * @param   bRm         The ModR/M byte (memory form).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32i, uint8_t, bRm, PFNIEMAIMPLFPUI32, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(int32_t,               i32Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2,    2);

    /* Effective address must be calculated before decoding completes. */
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi32Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14158
14159
/** Opcode 0xda !11/0. FIADD m32i - add a 32-bit integer to ST(0). */
FNIEMOP_DEF_1(iemOp_fiadd_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fiadd m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fiadd_r80_by_i32);
}
14166
14167
/** Opcode 0xda !11/1. FIMUL m32i - multiply ST(0) by a 32-bit integer. */
FNIEMOP_DEF_1(iemOp_fimul_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fimul m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fimul_r80_by_i32);
}
14174
14175
/** Opcode 0xda !11/2. FICOM m32i - compare ST(0) with a 32-bit integer; flags only. */
FNIEMOP_DEF_1(iemOp_ficom_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ficom st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(int32_t,               i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* Only FSW is updated; ST(0) itself is left alone. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14208
14209
/** Opcode 0xda !11/3. FICOMP m32i - compare ST(0) with a 32-bit integer, then pop. */
FNIEMOP_DEF_1(iemOp_ficomp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ficomp st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(int32_t,               i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* Same comparison as FICOM, but pops ST(0) afterwards. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14242
14243
/** Opcode 0xda !11/4. FISUB m32i - subtract a 32-bit integer from ST(0). */
FNIEMOP_DEF_1(iemOp_fisub_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisub m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisub_r80_by_i32);
}
14250
14251
/** Opcode 0xda !11/5. FISUBR m32i - reversed subtract: ST(0) = m32i - ST(0). */
FNIEMOP_DEF_1(iemOp_fisubr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisubr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisubr_r80_by_i32);
}
14258
14259
/** Opcode 0xda !11/6. FIDIV m32i - divide ST(0) by a 32-bit integer. */
FNIEMOP_DEF_1(iemOp_fidiv_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fidiv m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidiv_r80_by_i32);
}
14266
14267
/** Opcode 0xda !11/7. FIDIVR m32i - reversed divide: ST(0) = m32i / ST(0). */
FNIEMOP_DEF_1(iemOp_fidivr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fidivr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidivr_r80_by_i32);
}
14274
14275
/** Opcode 0xda. Escape group F2 decoder (FCMOVcc, FUCOMPP, integer m32 arithmetic). */
FNIEMOP_DEF(iemOp_EscF2)
{
    /* Record the FPU opcode location before fetching the ModR/M byte. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register form. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovb_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmove_stN,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovu_stN,  bRm);
            case 4: return IEMOP_RAISE_INVALID_OPCODE();
            case 5:
                /* Only DA E9 (FUCOMPP) is defined in this group. */
                if (bRm == 0xe9)
                    return FNIEMOP_CALL(iemOp_fucompp);
                return IEMOP_RAISE_INVALID_OPCODE();
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory form: 32-bit integer operands. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m32i,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m32i,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m32i,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m32i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m32i,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m32i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m32i,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m32i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
14315
14316
/** Opcode 0xdb !11/0. FILD m32i - load a 32-bit integer and push it as an 80-bit real. */
FNIEMOP_DEF_1(iemOp_fild_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fild m32i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(int32_t,               i32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val,  i32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* The push target (ST7 relative to the current top) must be free. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i32_to_r80, pFpuRes, pi32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14348
14349
/** Opcode 0xdb !11/1. FISTTP m32i - store ST(0) as a 32-bit integer with truncation, then pop. */
FNIEMOP_DEF_1(iemOp_fisttp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisttp m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int32_t *,               pi32Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Destination is mapped for writing; commit depends on the resulting FSW. */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: store integer indefinite if the IM mask allows it. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14384
14385
/** Opcode 0xdb !11/2. FIST m32i - store ST(0) as a 32-bit integer (no pop). */
FNIEMOP_DEF_1(iemOp_fist_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fist m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int32_t *,               pi32Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Destination is mapped for writing; commit depends on the resulting FSW. */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: store integer indefinite if the IM mask allows it. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14420
14421
14422/** Opcode 0xdb !11/3. */
14423FNIEMOP_DEF_1(iemOp_fistp_m32i, uint8_t, bRm)
14424{
14425 IEMOP_MNEMONIC("fisttp m32i");
14426 IEM_MC_BEGIN(3, 2);
14427 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14428 IEM_MC_LOCAL(uint16_t, u16Fsw);
14429 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14430 IEM_MC_ARG(int32_t *, pi32Dst, 1);
14431 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
14432
14433 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
14434 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14435 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14436 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14437
14438 IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
14439 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
14440 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
14441 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
14442 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
14443 IEM_MC_ELSE()
14444 IEM_MC_IF_FCW_IM()
14445 IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
14446 IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
14447 IEM_MC_ENDIF();
14448 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
14449 IEM_MC_ENDIF();
14450 IEM_MC_USED_FPU();
14451 IEM_MC_ADVANCE_RIP();
14452
14453 IEM_MC_END();
14454 return VINF_SUCCESS;
14455}
14456
14457
/** Opcode 0xdb !11/5. FLD m80r - push an 80-bit real from memory onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fld_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fld m80r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(RTFLOAT80U,            r80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT80U,  pr80Val,    r80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R80(r80Val, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* The push target (ST7 relative to the current top) must be free. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r80, pFpuRes, pr80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14489
14490
/** Opcode 0xdb !11/7. FSTP m80r - store ST(0) to memory as an 80-bit real, then pop. */
FNIEMOP_DEF_1(iemOp_fstp_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fstp m80r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT80U,             pr80Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Destination is mapped for writing; commit depends on the resulting FSW. */
    IEM_MC_MEM_MAP(pr80Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r80, pu16Fsw, pr80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr80Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: store the QNaN indefinite if the IM mask allows it. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(pr80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr80Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14525
14526
/** Opcode 0xdb 11/0. FCMOVNB - copy ST(i) to ST(0) if CF is clear (not below). */
FNIEMOP_DEF_1(iemOp_fcmovnb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovnb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Both ST(0) and ST(i) must be non-empty, otherwise it's a stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14553
14554
/** Opcode 0xdb 11/1. FCMOVNE - copy ST(i) to ST(0) if ZF is clear (not equal). */
FNIEMOP_DEF_1(iemOp_fcmovne_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovne st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Both ST(0) and ST(i) must be non-empty, otherwise it's a stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14581
14582
/** Opcode 0xdb 11/2. FCMOVNBE - copy ST(i) to ST(0) if both CF and ZF are clear (not below or equal). */
FNIEMOP_DEF_1(iemOp_fcmovnbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovnbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Both ST(0) and ST(i) must be non-empty, otherwise it's a stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14609
14610
14611/** Opcode 0xdb 11/3. */
14612FNIEMOP_DEF_1(iemOp_fcmovnnu_stN, uint8_t, bRm)
14613{
14614 IEMOP_MNEMONIC("fcmovnnu st0,stN");
14615 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14616
14617 IEM_MC_BEGIN(0, 1);
14618 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
14619
14620 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14621 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14622
14623 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
14624 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF)
14625 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
14626 IEM_MC_ENDIF();
14627 IEM_MC_UPDATE_FPU_OPCODE_IP();
14628 IEM_MC_ELSE()
14629 IEM_MC_FPU_STACK_UNDERFLOW(0);
14630 IEM_MC_ENDIF();
14631 IEM_MC_USED_FPU();
14632 IEM_MC_ADVANCE_RIP();
14633
14634 IEM_MC_END();
14635 return VINF_SUCCESS;
14636}
14637
14638
/** Opcode 0xdb 0xe0. FNENI - 8087-only (enable interrupts); ignored (NOP) on later FPUs. */
FNIEMOP_DEF(iemOp_fneni)
{
    IEMOP_MNEMONIC("fneni (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
14650
14651
/** Opcode 0xdb 0xe1. FNDISI - 8087-only (disable interrupts); ignored (NOP) on later FPUs. */
FNIEMOP_DEF(iemOp_fndisi)
{
    IEMOP_MNEMONIC("fndisi (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
14663
14664
/** Opcode 0xdb 0xe2. FNCLEX - clear FPU exception flags in FSW (no wait). */
FNIEMOP_DEF(iemOp_fnclex)
{
    IEMOP_MNEMONIC("fnclex");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_CLEAR_FSW_EX();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
14678
14679
/** Opcode 0xdb 0xe3. FNINIT - initialize the FPU without checking pending exceptions. */
FNIEMOP_DEF(iemOp_fninit)
{
    IEMOP_MNEMONIC("fninit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_finit, false /*fCheckXcpts*/);
}
14687
14688
/** Opcode 0xdb 0xe4. FNSETPM - 80287-only; ignored (NOP) on later FPUs. */
FNIEMOP_DEF(iemOp_fnsetpm)
{
    IEMOP_MNEMONIC("fnsetpm (80287/ign)");   /* set protected mode on fpu. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
14700
14701
/** Opcode 0xdb 0xe5. FRSTPM - 80287XL-only; raises \#UD here (newer CPU behavior). */
FNIEMOP_DEF(iemOp_frstpm)
{
    IEMOP_MNEMONIC("frstpm (80287XL/ign)"); /* reset pm, back to real mode. */
#if 0 /* #UDs on newer CPUs */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
#else
    return IEMOP_RAISE_INVALID_OPCODE();
#endif
}
14717
14718
/** Opcode 0xdb 11/5. FUCOMI - unordered compare ST(0) with ST(i), set EFLAGS; no pop. */
FNIEMOP_DEF_1(iemOp_fucomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fucomi st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fucomi_r80_by_r80, false /*fPop*/);
}
14725
14726
/** Opcode 0xdb 11/6. FCOMI - compare ST(0) with ST(i), set EFLAGS; no pop. */
FNIEMOP_DEF_1(iemOp_fcomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcomi st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, false /*fPop*/);
}
14733
14734
/** Opcode 0xdb. Escape group F3 decoder (FCMOVNcc, FNCLEX/FNINIT etc., m32i loads/stores, m80r). */
FNIEMOP_DEF(iemOp_EscF3)
{
    /* Record the FPU opcode location before fetching the ModR/M byte. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register form. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovnb_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmovne_stN,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovnbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovnnu_stN, bRm);
            case 4:
                /* Control instructions, one per low nibble value. */
                switch (bRm)
                {
                    case 0xe0: return FNIEMOP_CALL(iemOp_fneni);
                    case 0xe1: return FNIEMOP_CALL(iemOp_fndisi);
                    case 0xe2: return FNIEMOP_CALL(iemOp_fnclex);
                    case 0xe3: return FNIEMOP_CALL(iemOp_fninit);
                    case 0xe4: return FNIEMOP_CALL(iemOp_fnsetpm);
                    case 0xe5: return FNIEMOP_CALL(iemOp_frstpm);
                    case 0xe6: return IEMOP_RAISE_INVALID_OPCODE();
                    case 0xe7: return IEMOP_RAISE_INVALID_OPCODE();
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case 5: return FNIEMOP_CALL_1(iemOp_fucomi_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fcomi_stN, bRm);
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory form. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fild_m32i,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m32i,bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fist_m32i,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fistp_m32i, bRm);
            case 4: return IEMOP_RAISE_INVALID_OPCODE();
            case 5: return FNIEMOP_CALL_1(iemOp_fld_m80r,   bRm);
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return FNIEMOP_CALL_1(iemOp_fstp_m80r,  bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
14784
14785
/**
 * Common worker for FPU instructions working on STn and ST0, and storing the
 * result in STn unless IE, DE or ZE was raised.
 *
 * @param   bRm         The ModR/M byte (register form); low bits select ST(i).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,         1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value2,         2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* ST(i) is operand one and the destination, ST(0) is operand two. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14817
14818
/** Opcode 0xdc 11/0. FADD ST(i),ST(0). */
FNIEMOP_DEF_1(iemOp_fadd_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fadd stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fadd_r80_by_r80);
}
14825
14826
/** Opcode 0xdc 11/1. FMUL ST(i),ST(0). */
FNIEMOP_DEF_1(iemOp_fmul_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fmul stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fmul_r80_by_r80);
}
14833
14834
/** Opcode 0xdc 11/4. FSUBR ST(i),ST(0). */
FNIEMOP_DEF_1(iemOp_fsubr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsubr_r80_by_r80);
}
14841
14842
/** Opcode 0xdc 11/5. FSUB ST(i),ST(0). */
FNIEMOP_DEF_1(iemOp_fsub_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsub stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsub_r80_by_r80);
}
14849
14850
/** Opcode 0xdc 11/6. FDIVR ST(i),ST(0). */
FNIEMOP_DEF_1(iemOp_fdivr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdivr_r80_by_r80);
}
14857
14858
/** Opcode 0xdc 11/7. FDIV ST(i),ST(0). */
FNIEMOP_DEF_1(iemOp_fdiv_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdiv stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdiv_r80_by_r80);
}
14865
14866
/**
 * Common worker for FPU instructions working on ST0 and a 64-bit floating point
 * memory operand, and storing the result in ST0.
 *
 * @param   bRm         The ModR/M byte (memory form).
 * @param   pfnImpl     Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_ST0_m64r, uint8_t, bRm, PFNIEMAIMPLFPUR64, pfnImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U,            r64Factor2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,        FpuRes,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Factor1,                1);
    IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U,   pr64Factor2,    r64Factor2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Factor2, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Factor1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnImpl, pFpuRes, pr80Factor1, pr64Factor2);
        IEM_MC_STORE_FPU_RESULT_MEM_OP(FpuRes, 0, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(0, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14901
14902
/** Opcode 0xdc !11/0. */
FNIEMOP_DEF_1(iemOp_fadd_m64r, uint8_t, bRm)
{
    /* Add 64-bit fp memory operand to ST0. */
    IEMOP_MNEMONIC("fadd m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fadd_r80_by_r64);
}
14909
14910
/** Opcode 0xdc !11/1. */
FNIEMOP_DEF_1(iemOp_fmul_m64r, uint8_t, bRm)
{
    /* Multiply ST0 by 64-bit fp memory operand. */
    IEMOP_MNEMONIC("fmul m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fmul_r80_by_r64);
}
14917
14918
/** Opcode 0xdc !11/2. */
FNIEMOP_DEF_1(iemOp_fcom_m64r, uint8_t, bRm)
{
    /* Compare ST0 with a 64-bit fp memory operand; only FSW is updated. */
    IEMOP_MNEMONIC("fcom st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* UINT8_MAX = no stack register to store to on underflow. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14951
14952
/** Opcode 0xdc !11/3. */
FNIEMOP_DEF_1(iemOp_fcomp_m64r, uint8_t, bRm)
{
    /* Like fcom m64r, but pops ST0 afterwards (the _THEN_POP variants). */
    IEMOP_MNEMONIC("fcomp st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14985
14986
/** Opcode 0xdc !11/4. */
FNIEMOP_DEF_1(iemOp_fsub_m64r, uint8_t, bRm)
{
    /* Subtract 64-bit fp memory operand from ST0. */
    IEMOP_MNEMONIC("fsub m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsub_r80_by_r64);
}
14993
14994
/** Opcode 0xdc !11/5. */
FNIEMOP_DEF_1(iemOp_fsubr_m64r, uint8_t, bRm)
{
    /* Reverse subtract: memory operand minus ST0. */
    IEMOP_MNEMONIC("fsubr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsubr_r80_by_r64);
}
15001
15002
/** Opcode 0xdc !11/6. */
FNIEMOP_DEF_1(iemOp_fdiv_m64r, uint8_t, bRm)
{
    /* Divide ST0 by 64-bit fp memory operand. */
    IEMOP_MNEMONIC("fdiv m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdiv_r80_by_r64);
}
15009
15010
/** Opcode 0xdc !11/7. */
FNIEMOP_DEF_1(iemOp_fdivr_m64r, uint8_t, bRm)
{
    /* Reverse divide: memory operand divided by ST0. */
    IEMOP_MNEMONIC("fdivr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdivr_r80_by_r64);
}
15017
15018
/** Opcode 0xdc. */
FNIEMOP_DEF(iemOp_EscF4)
{
    /* Record the opcode byte offset for the FPU FOP register, then dispatch
       on the ModR/M reg field; mod==3 is the register form, else memory. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN_st0,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN_st0,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN,      bRm); /* Marked reserved, intel behavior is that of FCOM ST(i). */
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN,     bRm); /* Marked reserved, intel behavior is that of FCOMP ST(i). */
            case 4: return FNIEMOP_CALL_1(iemOp_fsubr_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsub_stN_st0,  bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivr_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdiv_stN_st0,  bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m64r,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m64r,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m64r,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m64r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m64r,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m64r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m64r,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m64r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
15055
15056
/** Opcode 0xdd !11/0.
 * @sa iemOp_fld_m32r */
FNIEMOP_DEF_1(iemOp_fld_m64r, uint8_t, bRm)
{
    /* Load a 64-bit fp memory operand, convert to r80, push onto the stack. */
    IEMOP_MNEMONIC("fld m64r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val, r64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Val, pIemCpu->iEffSeg, GCPtrEffSrc);
    /* The incoming top-of-stack slot (currently ST7) must be empty to push. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r64_to_r80, pFpuRes, pr64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15088
15089
/** Opcode 0xdd !11/1. */
FNIEMOP_DEF_1(iemOp_fisttp_m64i, uint8_t, bRm)
{
    /* Store ST0 to m64 integer with truncation, then pop.  On an empty
       register and masked #IA, the integer indefinite value is stored. */
    IEMOP_MNEMONIC("fisttp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15124
15125
/** Opcode 0xdd !11/2. */
FNIEMOP_DEF_1(iemOp_fst_m64r, uint8_t, bRm)
{
    /* Store ST0 to a 64-bit fp memory operand without popping.  On an empty
       register and masked #IA, a negative QNaN is stored instead. */
    IEMOP_MNEMONIC("fst m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15160
15161
15162
15163
/** Opcode 0xdd !11/3. */
FNIEMOP_DEF_1(iemOp_fstp_m64r, uint8_t, bRm)
{
    /* Like fst m64r, but pops ST0 afterwards (the _THEN_POP variants). */
    IEMOP_MNEMONIC("fstp m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15198
15199
/** Opcode 0xdd !11/4. */
FNIEMOP_DEF_1(iemOp_frstor, uint8_t, bRm)
{
    /* Restore the full FPU state from memory; deferred to a C implementation. */
    IEMOP_MNEMONIC("frstor m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pIemCpu->enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_frstor, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
15216
15217
/** Opcode 0xdd !11/6. */
FNIEMOP_DEF_1(iemOp_fnsave, uint8_t, bRm)
{
    /* Save the full FPU state to memory; deferred to a C implementation. */
    IEMOP_MNEMONIC("fnsave m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pIemCpu->enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fnsave, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;

}
15235
/** Opcode 0xdd !11/7. */
FNIEMOP_DEF_1(iemOp_fnstsw, uint8_t, bRm)
{
    /* Store FSW to a 16-bit memory operand; no pending-exception check (fnstsw). */
    IEMOP_MNEMONIC("fnstsw m16");

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();

    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Tmp);
    IEM_MC_ADVANCE_RIP();

/** @todo Debug / drop a hint to the verifier that things may differ
 * from REM. Seen 0x4020 (iem) vs 0x4000 (rem) at 0008:801c6b88 booting
 * NT4SP1. (X86_FSW_PE) */
    IEM_MC_END();
    return VINF_SUCCESS;
}
15259
15260
/** Opcode 0xdd 11/0. */
FNIEMOP_DEF_1(iemOp_ffree_stN, uint8_t, bRm)
{
    /* Tag ST(i) as empty without touching the top-of-stack pointer. */
    IEMOP_MNEMONIC("ffree stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C1, C2 and C3 are documented as undefined, we leave them
       unmodified. */

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
15282
15283
/** Opcode 0xdd 11/2. */
FNIEMOP_DEF_1(iemOp_fst_stN, uint8_t, bRm)
{
    /* Copy ST0 to ST(i) without popping; FSW C-bits come from SET_FPU_RESULT. */
    IEMOP_MNEMONIC("fst st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
15306
15307
15308/** Opcode 0xdd 11/3. */
15309FNIEMOP_DEF_1(iemOp_fucom_stN_st0, uint8_t, bRm)
15310{
15311 IEMOP_MNEMONIC("fcom st0,stN");
15312 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fucom_r80_by_r80);
15313}
15314
15315
15316/** Opcode 0xdd 11/4. */
15317FNIEMOP_DEF_1(iemOp_fucomp_stN, uint8_t, bRm)
15318{
15319 IEMOP_MNEMONIC("fcomp st0,stN");
15320 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fucom_r80_by_r80);
15321}
15322
15323
/** Opcode 0xdd. */
FNIEMOP_DEF(iemOp_EscF5)
{
    /* Record the opcode byte offset for the FPU FOP register, then dispatch
       on the ModR/M reg field; mod==3 is the register form, else memory. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffree_stN,   bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN,    bRm); /* Reserved, intel behavior is that of XCHG ST(i). */
            case 2: return FNIEMOP_CALL_1(iemOp_fst_stN,     bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN,    bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fucom_stN_st0,bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fucomp_stN,  bRm);
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m64r,    bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m64i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m64r,    bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m64r,   bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_frstor,      bRm);
            case 5: return IEMOP_RAISE_INVALID_OPCODE();
            case 6: return FNIEMOP_CALL_1(iemOp_fnsave,      bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstsw,      bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
15360
15361
/** Opcode 0xde 11/0. */
FNIEMOP_DEF_1(iemOp_faddp_stN_st0, uint8_t, bRm)
{
    /* Add-and-pop, register form; delegates to the shared popping worker. */
    IEMOP_MNEMONIC("faddp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fadd_r80_by_r80);
}
15368
15369
/** Opcode 0xde 11/1. */
FNIEMOP_DEF_1(iemOp_fmulp_stN_st0, uint8_t, bRm)
{
    /* Multiply-and-pop, register form; delegates to the shared popping worker. */
    IEMOP_MNEMONIC("fmulp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fmul_r80_by_r80);
}
15376
15377
15378/** Opcode 0xde 0xd9. */
15379FNIEMOP_DEF(iemOp_fcompp)
15380{
15381 IEMOP_MNEMONIC("fucompp st0,stN");
15382 return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fcom_r80_by_r80);
15383}
15384
15385
/** Opcode 0xde 11/4. */
FNIEMOP_DEF_1(iemOp_fsubrp_stN_st0, uint8_t, bRm)
{
    /* Reverse-subtract-and-pop, register form. */
    IEMOP_MNEMONIC("fsubrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsubr_r80_by_r80);
}
15392
15393
/** Opcode 0xde 11/5. */
FNIEMOP_DEF_1(iemOp_fsubp_stN_st0, uint8_t, bRm)
{
    /* Subtract-and-pop, register form. */
    IEMOP_MNEMONIC("fsubp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsub_r80_by_r80);
}
15400
15401
/** Opcode 0xde 11/6. */
FNIEMOP_DEF_1(iemOp_fdivrp_stN_st0, uint8_t, bRm)
{
    /* Reverse-divide-and-pop, register form. */
    IEMOP_MNEMONIC("fdivrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdivr_r80_by_r80);
}
15408
15409
/** Opcode 0xde 11/7. */
FNIEMOP_DEF_1(iemOp_fdivp_stN_st0, uint8_t, bRm)
{
    /* Divide-and-pop, register form. */
    IEMOP_MNEMONIC("fdivp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdiv_r80_by_r80);
}
15416
15417
/**
 * Common worker for FPU instructions working on ST0 and an m16i, and storing
 * the result in ST0.
 *
 * @param   bRm         The ModR/M byte (memory form, mod != 3).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m16i, uint8_t, bRm, PFNIEMAIMPLFPUI16, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* Operate only if ST0 is valid; otherwise record a stack underflow. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi16Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15453
15454
/** Opcode 0xde !11/0. */
FNIEMOP_DEF_1(iemOp_fiadd_m16i, uint8_t, bRm)
{
    /* Add a 16-bit integer memory operand to ST0. */
    IEMOP_MNEMONIC("fiadd m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fiadd_r80_by_i16);
}
15461
15462
/** Opcode 0xde !11/1. */
FNIEMOP_DEF_1(iemOp_fimul_m16i, uint8_t, bRm)
{
    /* Multiply ST0 by a 16-bit integer memory operand. */
    IEMOP_MNEMONIC("fimul m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fimul_r80_by_i16);
}
15469
15470
/** Opcode 0xde !11/2. */
FNIEMOP_DEF_1(iemOp_ficom_m16i, uint8_t, bRm)
{
    /* Compare ST0 with a 16-bit integer memory operand; only FSW is updated. */
    IEMOP_MNEMONIC("ficom st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15503
15504
/** Opcode 0xde !11/3. */
FNIEMOP_DEF_1(iemOp_ficomp_m16i, uint8_t, bRm)
{
    /* Like ficom m16i, but pops ST0 afterwards (the _THEN_POP variants). */
    IEMOP_MNEMONIC("ficomp st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15537
15538
/** Opcode 0xde !11/4. */
FNIEMOP_DEF_1(iemOp_fisub_m16i, uint8_t, bRm)
{
    /* Subtract a 16-bit integer memory operand from ST0. */
    IEMOP_MNEMONIC("fisub m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisub_r80_by_i16);
}
15545
15546
/** Opcode 0xde !11/5. */
FNIEMOP_DEF_1(iemOp_fisubr_m16i, uint8_t, bRm)
{
    /* Reverse subtract: 16-bit integer memory operand minus ST0. */
    IEMOP_MNEMONIC("fisubr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisubr_r80_by_i16);
}
15553
15554
15555/** Opcode 0xde !11/6. */
15556FNIEMOP_DEF_1(iemOp_fidiv_m16i, uint8_t, bRm)
15557{
15558 IEMOP_MNEMONIC("fiadd m16i");
15559 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidiv_r80_by_i16);
15560}
15561
15562
15563/** Opcode 0xde !11/7. */
15564FNIEMOP_DEF_1(iemOp_fidivr_m16i, uint8_t, bRm)
15565{
15566 IEMOP_MNEMONIC("fiadd m16i");
15567 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidivr_r80_by_i16);
15568}
15569
15570
/** Opcode 0xde. */
FNIEMOP_DEF(iemOp_EscF6)
{
    /* Record the opcode byte offset for the FPU FOP register, then dispatch
       on the ModR/M reg field; mod==3 is the register form, else memory. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_faddp_stN_st0, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmulp_stN_st0, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 3: if (bRm == 0xd9)    /* FCOMPP is the only valid /3 encoding. */
                        return FNIEMOP_CALL(iemOp_fcompp);
                    return IEMOP_RAISE_INVALID_OPCODE();
            case 4: return FNIEMOP_CALL_1(iemOp_fsubrp_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubp_stN_st0, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivrp_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivp_stN_st0, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m16i,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m16i,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m16i,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m16i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m16i,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m16i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m16i,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m16i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
15609
15610
/** Opcode 0xdf 11/0.
 * Undocumented instruction, assumed to work like ffree + fincstp. */
FNIEMOP_DEF_1(iemOp_ffreep_stN, uint8_t, bRm)
{
    /* Tag ST(i) empty, then increment the top-of-stack pointer (pop). */
    IEMOP_MNEMONIC("ffreep stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
15632
15633
/** Opcode 0xdf 0xe0. */
FNIEMOP_DEF(iemOp_fnstsw_ax)
{
    /* Store FSW to AX; no pending-exception check (fnstsw). */
    IEMOP_MNEMONIC("fnstsw ax");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
15649
15650
15651/** Opcode 0xdf 11/5. */
15652FNIEMOP_DEF_1(iemOp_fucomip_st0_stN, uint8_t, bRm)
15653{
15654 IEMOP_MNEMONIC("fcomip st0,stN");
15655 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
15656}
15657
15658
/** Opcode 0xdf 11/6. */
FNIEMOP_DEF_1(iemOp_fcomip_st0_stN, uint8_t, bRm)
{
    /* FCOMIP ST0,ST(i): ordered compare into EFLAGS, then pop; C impl. */
    IEMOP_MNEMONIC("fcomip st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
}
15665
15666
/** Opcode 0xdf !11/0. */
FNIEMOP_DEF_1(iemOp_fild_m16i, uint8_t, bRm)
{
    /* Load a 16-bit integer memory operand, convert to r80, push it. */
    IEMOP_MNEMONIC("fild m16i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int16_t, i16Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val, i16Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* The incoming top-of-stack slot (currently ST7) must be empty to push. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i16_to_r80, pFpuRes, pi16Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15698
15699
/** Opcode 0xdf !11/1. */
FNIEMOP_DEF_1(iemOp_fisttp_m16i, uint8_t, bRm)
{
    /* Store ST0 to m16 integer with truncation, then pop.  On an empty
       register and masked #IA, the integer indefinite value is stored. */
    IEMOP_MNEMONIC("fisttp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15734
15735
15736/** Opcode 0xdf !11/2. */
15737FNIEMOP_DEF_1(iemOp_fist_m16i, uint8_t, bRm)
15738{
15739 IEMOP_MNEMONIC("fistp m16i");
15740 IEM_MC_BEGIN(3, 2);
15741 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
15742 IEM_MC_LOCAL(uint16_t, u16Fsw);
15743 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
15744 IEM_MC_ARG(int16_t *, pi16Dst, 1);
15745 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
15746
15747 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
15748 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15749 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15750 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15751
15752 IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
15753 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
15754 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
15755 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
15756 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
15757 IEM_MC_ELSE()
15758 IEM_MC_IF_FCW_IM()
15759 IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
15760 IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
15761 IEM_MC_ENDIF();
15762 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
15763 IEM_MC_ENDIF();
15764 IEM_MC_USED_FPU();
15765 IEM_MC_ADVANCE_RIP();
15766
15767 IEM_MC_END();
15768 return VINF_SUCCESS;
15769}
15770
15771
/** Opcode 0xdf !11/3. */
FNIEMOP_DEF_1(iemOp_fistp_m16i, uint8_t, bRm)
{
    /* Store ST0 as a 16-bit integer, then pop (the _THEN_POP variants). */
    IEMOP_MNEMONIC("fistp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15806
15807
/** Opcode 0xdf !11/4. */
FNIEMOP_STUB_1(iemOp_fbld_m80d, uint8_t, bRm); /* FBLD m80bcd: not implemented (stub). */
15810
15811
/** Opcode 0xdf !11/5. */
FNIEMOP_DEF_1(iemOp_fild_m64i, uint8_t, bRm)
{
    /* Load a 64-bit integer memory operand, convert to r80, push it. */
    IEMOP_MNEMONIC("fild m64i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int64_t, i64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int64_t const *, pi64Val, i64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I64(i64Val, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* The incoming top-of-stack slot (currently ST7) must be empty to push. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i64_to_r80, pFpuRes, pi64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15843
15844
/** Opcode 0xdf !11/6. FBSTP m80bcd - not yet implemented (stub). */
FNIEMOP_STUB_1(iemOp_fbstp_m80d, uint8_t, bRm);
15847
15848
/** Opcode 0xdf !11/7. FISTP m64i - store ST(0) as int64 to memory and pop. */
FNIEMOP_DEF_1(iemOp_fistp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fistp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination first so memory faults (#PF/#GP) are raised before
       any FPU state is modified. */
    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        /* Commit conditionally on the resulting FSW, then update FSW with the
           memory operand info and pop the stack. */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: with FCW.IM masked, store the integer indefinite. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15883
15884
/**
 * Opcode 0xdf - FPU escape group 7.
 *
 * Dispatches on the ModR/M byte: register forms (mod==3) handle FFREEP,
 * FNSTSW AX, FUCOMIP/FCOMIP and some reserved aliases; memory forms handle
 * the 16/64-bit integer and BCD load/store instructions.
 */
FNIEMOP_DEF(iemOp_EscF7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register forms, selected by the reg field. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffreep_stN, bRm); /* ffree + pop afterwards, since forever according to AMD. */
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, behaves like FXCH ST(i) on intel. */
            case 2: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
            case 4: if (bRm == 0xe0) /* FNSTSW AX is only valid for exactly DF E0. */
                        return FNIEMOP_CALL(iemOp_fnstsw_ax);
                    return IEMOP_RAISE_INVALID_OPCODE();
            case 5: return FNIEMOP_CALL_1(iemOp_fucomip_st0_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fcomip_st0_stN, bRm);
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory forms, selected by the reg field. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fild_m16i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m16i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fist_m16i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fistp_m16i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fbld_m80d, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fild_m64i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fbstp_m80d, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fistp_m64i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
15922
15923
/**
 * Opcode 0xe0. LOOPNE Jb - decrement xCX and jump if xCX != 0 and ZF == 0.
 * The effective address size selects the counter width (CX/ECX/RCX).
 */
FNIEMOP_DEF(iemOp_loopne_Jb)
{
    IEMOP_MNEMONIC("loopne Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
15970
15971
/**
 * Opcode 0xe1. LOOPE Jb - decrement xCX and jump if xCX != 0 and ZF == 1.
 * The effective address size selects the counter width (CX/ECX/RCX).
 */
FNIEMOP_DEF(iemOp_loope_Jb)
{
    IEMOP_MNEMONIC("loope Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16018
16019
/**
 * Opcode 0xe2. LOOP Jb - decrement xCX and jump if xCX != 0.
 *
 * Contains a jump-to-self shortcut: when the branch target is the LOOP
 * instruction itself (i8Imm equals minus the instruction length), the loop
 * would just spin until the counter reaches zero, so the counter is cleared
 * directly and execution falls through.
 */
FNIEMOP_DEF(iemOp_loop_Jb)
{
    IEMOP_MNEMONIC("loop Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /** @todo Check out the #GP case if EIP < CS.Base or EIP > CS.Limit when
     * using the 32-bit operand size override.  How can that be restarted?  See
     * weird pseudo code in intel manual. */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            /* offOpcode is the decoded instruction length here, so this
               detects the jump-to-self case. */
            if (-(int8_t)pIemCpu->offOpcode != i8Imm)
            {
                IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
                IEM_MC_IF_CX_IS_NZ() {
                    IEM_MC_REL_JMP_S8(i8Imm);
                } IEM_MC_ELSE() {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ENDIF();
            }
            else
            {
                /* Jump-to-self: zero the counter and move on. */
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP();
            }
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            if (-(int8_t)pIemCpu->offOpcode != i8Imm)
            {
                IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
                IEM_MC_IF_ECX_IS_NZ() {
                    IEM_MC_REL_JMP_S8(i8Imm);
                } IEM_MC_ELSE() {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ENDIF();
            }
            else
            {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP();
            }
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            if (-(int8_t)pIemCpu->offOpcode != i8Imm)
            {
                IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
                IEM_MC_IF_RCX_IS_NZ() {
                    IEM_MC_REL_JMP_S8(i8Imm);
                } IEM_MC_ELSE() {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ENDIF();
            }
            else
            {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP();
            }
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16093
16094
/**
 * Opcode 0xe3. JCXZ/JECXZ/JRCXZ Jb - jump if the address-size counter
 * register (CX/ECX/RCX) is zero.
 */
FNIEMOP_DEF(iemOp_jecxz_Jb)
{
    IEMOP_MNEMONIC("jecxz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            /* Note: branches are inverted - jump taken when counter is ZERO. */
            IEM_MC_IF_CX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_ECX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_RCX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16138
16139
16140/** Opcode 0xe4 */
16141FNIEMOP_DEF(iemOp_in_AL_Ib)
16142{
16143 IEMOP_MNEMONIC("in eAX,Ib");
16144 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
16145 IEMOP_HLP_NO_LOCK_PREFIX();
16146 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, 1);
16147}
16148
16149
/** Opcode 0xe5. IN eAX,Ib - read a word/dword from the immediate port. */
FNIEMOP_DEF(iemOp_in_eAX_Ib)
{
    IEMOP_MNEMONIC("in eAX,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* Operand size selects 16- or 32-bit access (no 64-bit port I/O). */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, pIemCpu->enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
16158
16159
/** Opcode 0xe6. OUT Ib,AL - write AL to the immediate port. */
FNIEMOP_DEF(iemOp_out_Ib_AL)
{
    IEMOP_MNEMONIC("out Ib,AL");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, 1);
}
16168
16169
/** Opcode 0xe7. OUT Ib,eAX - write AX/EAX to the immediate port. */
FNIEMOP_DEF(iemOp_out_Ib_eAX)
{
    IEMOP_MNEMONIC("out Ib,eAX");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* Operand size selects 16- or 32-bit access (no 64-bit port I/O). */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, pIemCpu->enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
16178
16179
/**
 * Opcode 0xe8. CALL Jv - near relative call.
 * In 64-bit mode the displacement is 32-bit sign-extended to 64 bits.
 */
FNIEMOP_DEF(iemOp_call_Jv)
{
    IEMOP_MNEMONIC("call Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_16, (int16_t)u16Imm);
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_32, (int32_t)u32Imm);
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_64, u64Imm);
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16208
16209
/**
 * Opcode 0xe9. JMP Jv - near relative jump.
 * 64-bit mode shares the 32-bit displacement path (sign-extended relative
 * jump), hence the shared case label.
 */
FNIEMOP_DEF(iemOp_jmp_Jv)
{
    IEMOP_MNEMONIC("jmp Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
            IEM_MC_BEGIN(0, 0);
            IEM_MC_REL_JMP_S16(i16Imm);
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT: /* 64-bit uses the same 32-bit displacement. */
        case IEMMODE_32BIT:
        {
            int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
            IEM_MC_BEGIN(0, 0);
            IEM_MC_REL_JMP_S32(i32Imm);
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16239
16240
/**
 * Opcode 0xea. JMP Ap - direct far jump (invalid in 64-bit mode).
 */
FNIEMOP_DEF(iemOp_jmp_Ap)
{
    IEMOP_MNEMONIC("jmp Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    uint32_t offSeg;
    if (pIemCpu->enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&offSeg);
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
    uint16_t uSel;  IEM_OPCODE_GET_NEXT_U16(&uSel);
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_FarJmp, uSel, offSeg, pIemCpu->enmEffOpSize);
}
16257
16258
/** Opcode 0xeb. JMP Jb - short relative jump. */
FNIEMOP_DEF(iemOp_jmp_Jb)
{
    IEMOP_MNEMONIC("jmp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_REL_JMP_S8(i8Imm);
    IEM_MC_END();
    return VINF_SUCCESS;
}
16272
16273
/** Opcode 0xec. IN AL,DX - read one byte from the port in DX into AL. */
FNIEMOP_DEF(iemOp_in_AL_DX)
{
    IEMOP_MNEMONIC("in AL,DX");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, 1);
}
16281
16282
/** Opcode 0xed. IN eAX,DX - read a word/dword from the port in DX.
 * NOTE(review): the function name lacks the "in_" prefix its siblings use
 * (cf. iemOp_in_AL_DX); kept as-is since the opcode table elsewhere in the
 * file references this symbol. */
FNIEMOP_DEF(iemOp_eAX_DX)
{
    IEMOP_MNEMONIC("in eAX,DX");
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* Operand size selects 16- or 32-bit access (no 64-bit port I/O). */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, pIemCpu->enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
16290
16291
/** Opcode 0xee. OUT DX,AL - write AL to the port in DX. */
FNIEMOP_DEF(iemOp_out_DX_AL)
{
    IEMOP_MNEMONIC("out DX,AL");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, 1);
}
16299
16300
/** Opcode 0xef. OUT DX,eAX - write AX/EAX to the port in DX. */
FNIEMOP_DEF(iemOp_out_DX_eAX)
{
    IEMOP_MNEMONIC("out DX,eAX");
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* Operand size selects 16- or 32-bit access (no 64-bit port I/O). */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, pIemCpu->enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
16308
16309
/**
 * Opcode 0xf0. LOCK prefix - records the prefix and decodes the next opcode
 * byte through the one-byte opcode table.
 */
FNIEMOP_DEF(iemOp_lock)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("lock");
    pIemCpu->fPrefixes |= IEM_OP_PRF_LOCK;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
16319
16320
/**
 * Opcode 0xf1. INT1/ICEBP - raises a \#DB without the BP-instruction
 * semantics of INT3.
 */
FNIEMOP_DEF(iemOp_int_1)
{
    IEMOP_MNEMONIC("int1"); /* icebp */
    IEMOP_HLP_MIN_386(); /** @todo does not generate #UD on 286, or so they say... */
    /** @todo testcase! */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_DB, false /*fIsBpInstr*/);
}
16329
16330
/**
 * Opcode 0xf2. REPNE/REPNZ prefix - records the prefix (overriding any
 * earlier REPE) and decodes the next opcode byte.
 */
FNIEMOP_DEF(iemOp_repne)
{
    /* This overrides any previous REPE prefix. */
    pIemCpu->fPrefixes &= ~IEM_OP_PRF_REPZ;
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repne");
    pIemCpu->fPrefixes |= IEM_OP_PRF_REPNZ;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
16342
16343
/**
 * Opcode 0xf3. REPE/REPZ prefix - records the prefix (overriding any earlier
 * REPNE) and decodes the next opcode byte.
 */
FNIEMOP_DEF(iemOp_repe)
{
    /* This overrides any previous REPNE prefix. */
    pIemCpu->fPrefixes &= ~IEM_OP_PRF_REPNZ;
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repe");
    pIemCpu->fPrefixes |= IEM_OP_PRF_REPZ;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
16355
16356
16357/** Opcode 0xf4. */
16358FNIEMOP_DEF(iemOp_hlt)
16359{
16360 IEMOP_HLP_NO_LOCK_PREFIX();
16361#if IEM_CFG_TARGET_CPU == IEMTARGETCPU_DYNAMIC && 0
16362 if ( pIemCpu->uTargetCpu == IEMTARGETCPU_CURRENT
16363 && pIemCpu->CTX_SUFF(pCtx)->cs.Sel <= 1000)
16364 {
16365 pIemCpu->uTargetCpu = IEMTARGETCPU_286;
16366 LogAlways(("\niemOp_hlt: Enabled CPU restrictions!\n\n"));
16367 }
16368#endif
16369 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_hlt);
16370}
16371
16372
/** Opcode 0xf5. CMC - complement the carry flag. */
FNIEMOP_DEF(iemOp_cmc)
{
    IEMOP_MNEMONIC("cmc");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_FLIP_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
16384
16385
/**
 * Common implementation of 'inc/dec/not/neg Eb'.
 *
 * Register operands go through the normal (unlocked) worker; memory operands
 * are mapped read-write and dispatched to the locked worker when a LOCK
 * prefix is present.
 *
 * @param   bRm     The RM byte.
 * @param   pImpl   The instruction implementation.
 */
FNIEMOP_DEF_2(iemOpCommonUnaryEb, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
{
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint32_t *, pEFlags, 1);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        /* A LOCK prefix selects the atomic variant of the worker. */
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU8, pu8Dst, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
16429
16430
/**
 * Common implementation of 'inc/dec/not/neg Ev'.
 *
 * Register operands are forwarded to iemOpCommonUnaryGReg; memory operands
 * are handled per operand size, mapped read-write, and dispatched to the
 * locked worker when a LOCK prefix is present.
 *
 * @param   bRm     The RM byte.
 * @param   pImpl   The instruction implementation.
 */
FNIEMOP_DEF_2(iemOpCommonUnaryEv, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
{
    /* Registers are handled by a common worker. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, pImpl, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);

    /* Memory we do here. */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 2);
            IEM_MC_ARG(uint16_t *, pu16Dst, 0);
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
            IEM_MC_FETCH_EFLAGS(EFlags);
            if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
            else
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU16, pu16Dst, pEFlags);

            IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
            IEM_MC_COMMIT_EFLAGS(EFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 2);
            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
            IEM_MC_FETCH_EFLAGS(EFlags);
            if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
            else
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU32, pu32Dst, pEFlags);

            IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
            IEM_MC_COMMIT_EFLAGS(EFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 2);
            IEM_MC_ARG(uint64_t *, pu64Dst, 0);
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
            IEM_MC_FETCH_EFLAGS(EFlags);
            if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
            else
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU64, pu64Dst, pEFlags);

            IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
            IEM_MC_COMMIT_EFLAGS(EFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16509
16510
/**
 * Opcode 0xf6 /0. TEST Eb,Ib - AND without writing back the result, only
 * the flags are updated.
 */
FNIEMOP_DEF_1(iemOp_grp3_test_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC("test Eb,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/u8Imm, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, u8Src, 1);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        /* 1 = one immediate byte follows the ModR/M operand bytes. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_ASSIGN(u8Src, u8Imm);
        /* Read-only mapping: TEST never writes the destination. */
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_R, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_R);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
16558
16559
/**
 * Opcode 0xf7 /0. TEST Ev,Iv - AND without writing back the result, only
 * the flags are updated.  Handled per operand size; the 64-bit immediate is
 * a sign-extended 32-bit value.
 */
FNIEMOP_DEF_1(iemOp_grp3_test_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC("test Ev,Iv");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/u16Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/u32Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
                /* No clearing the high dword here - test doesn't write back the result. */
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/u64Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* 2 = immediate word follows the ModR/M operand bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                /* Read-only mapping: TEST never writes the destination. */
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_R, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_R, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* 4 = immediate dword (sign-extended to 64-bit) follows. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_R, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
16696
16697
16698/** Opcode 0xf6 /4, /5, /6 and /7. */
16699FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEb, uint8_t, bRm, PFNIEMAIMPLMULDIVU8, pfnU8)
16700{
16701 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
16702
16703 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
16704 {
16705 /* register access */
16706 IEMOP_HLP_NO_LOCK_PREFIX();
16707 IEM_MC_BEGIN(3, 1);
16708 IEM_MC_ARG(uint16_t *, pu16AX, 0);
16709 IEM_MC_ARG(uint8_t, u8Value, 1);
16710 IEM_MC_ARG(uint32_t *, pEFlags, 2);
16711 IEM_MC_LOCAL(int32_t, rc);
16712
16713 IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
16714 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
16715 IEM_MC_REF_EFLAGS(pEFlags);
16716 IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
16717 IEM_MC_IF_LOCAL_IS_Z(rc) {
16718 IEM_MC_ADVANCE_RIP();
16719 } IEM_MC_ELSE() {
16720 IEM_MC_RAISE_DIVIDE_ERROR();
16721 } IEM_MC_ENDIF();
16722
16723 IEM_MC_END();
16724 }
16725 else
16726 {
16727 /* memory access. */
16728 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
16729
16730 IEM_MC_BEGIN(3, 2);
16731 IEM_MC_ARG(uint16_t *, pu16AX, 0);
16732 IEM_MC_ARG(uint8_t, u8Value, 1);
16733 IEM_MC_ARG(uint32_t *, pEFlags, 2);
16734 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
16735 IEM_MC_LOCAL(int32_t, rc);
16736
16737 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
16738 IEM_MC_FETCH_MEM_U8(u8Value, pIemCpu->iEffSeg, GCPtrEffDst);
16739 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
16740 IEM_MC_REF_EFLAGS(pEFlags);
16741 IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
16742 IEM_MC_IF_LOCAL_IS_Z(rc) {
16743 IEM_MC_ADVANCE_RIP();
16744 } IEM_MC_ELSE() {
16745 IEM_MC_RAISE_DIVIDE_ERROR();
16746 } IEM_MC_ENDIF();
16747
16748 IEM_MC_END();
16749 }
16750 return VINF_SUCCESS;
16751}
16752
16753
16754/** Opcode 0xf7 /4, /5, /6 and /7. */
16755FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEv, uint8_t, bRm, PCIEMOPMULDIVSIZES, pImpl)
16756{
16757 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
16758 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
16759
16760 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
16761 {
16762 /* register access */
16763 switch (pIemCpu->enmEffOpSize)
16764 {
16765 case IEMMODE_16BIT:
16766 {
16767 IEMOP_HLP_NO_LOCK_PREFIX();
16768 IEM_MC_BEGIN(4, 1);
16769 IEM_MC_ARG(uint16_t *, pu16AX, 0);
16770 IEM_MC_ARG(uint16_t *, pu16DX, 1);
16771 IEM_MC_ARG(uint16_t, u16Value, 2);
16772 IEM_MC_ARG(uint32_t *, pEFlags, 3);
16773 IEM_MC_LOCAL(int32_t, rc);
16774
16775 IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
16776 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
16777 IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
16778 IEM_MC_REF_EFLAGS(pEFlags);
16779 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
16780 IEM_MC_IF_LOCAL_IS_Z(rc) {
16781 IEM_MC_ADVANCE_RIP();
16782 } IEM_MC_ELSE() {
16783 IEM_MC_RAISE_DIVIDE_ERROR();
16784 } IEM_MC_ENDIF();
16785
16786 IEM_MC_END();
16787 return VINF_SUCCESS;
16788 }
16789
16790 case IEMMODE_32BIT:
16791 {
16792 IEMOP_HLP_NO_LOCK_PREFIX();
16793 IEM_MC_BEGIN(4, 1);
16794 IEM_MC_ARG(uint32_t *, pu32AX, 0);
16795 IEM_MC_ARG(uint32_t *, pu32DX, 1);
16796 IEM_MC_ARG(uint32_t, u32Value, 2);
16797 IEM_MC_ARG(uint32_t *, pEFlags, 3);
16798 IEM_MC_LOCAL(int32_t, rc);
16799
16800 IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
16801 IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
16802 IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
16803 IEM_MC_REF_EFLAGS(pEFlags);
16804 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
16805 IEM_MC_IF_LOCAL_IS_Z(rc) {
16806 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
16807 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
16808 IEM_MC_ADVANCE_RIP();
16809 } IEM_MC_ELSE() {
16810 IEM_MC_RAISE_DIVIDE_ERROR();
16811 } IEM_MC_ENDIF();
16812
16813 IEM_MC_END();
16814 return VINF_SUCCESS;
16815 }
16816
16817 case IEMMODE_64BIT:
16818 {
16819 IEMOP_HLP_NO_LOCK_PREFIX();
16820 IEM_MC_BEGIN(4, 1);
16821 IEM_MC_ARG(uint64_t *, pu64AX, 0);
16822 IEM_MC_ARG(uint64_t *, pu64DX, 1);
16823 IEM_MC_ARG(uint64_t, u64Value, 2);
16824 IEM_MC_ARG(uint32_t *, pEFlags, 3);
16825 IEM_MC_LOCAL(int32_t, rc);
16826
16827 IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
16828 IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
16829 IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
16830 IEM_MC_REF_EFLAGS(pEFlags);
16831 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
16832 IEM_MC_IF_LOCAL_IS_Z(rc) {
16833 IEM_MC_ADVANCE_RIP();
16834 } IEM_MC_ELSE() {
16835 IEM_MC_RAISE_DIVIDE_ERROR();
16836 } IEM_MC_ENDIF();
16837
16838 IEM_MC_END();
16839 return VINF_SUCCESS;
16840 }
16841
16842 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16843 }
16844 }
16845 else
16846 {
16847 /* memory access. */
16848 switch (pIemCpu->enmEffOpSize)
16849 {
16850 case IEMMODE_16BIT:
16851 {
16852 IEMOP_HLP_NO_LOCK_PREFIX();
16853 IEM_MC_BEGIN(4, 2);
16854 IEM_MC_ARG(uint16_t *, pu16AX, 0);
16855 IEM_MC_ARG(uint16_t *, pu16DX, 1);
16856 IEM_MC_ARG(uint16_t, u16Value, 2);
16857 IEM_MC_ARG(uint32_t *, pEFlags, 3);
16858 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
16859 IEM_MC_LOCAL(int32_t, rc);
16860
16861 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
16862 IEM_MC_FETCH_MEM_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
16863 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
16864 IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
16865 IEM_MC_REF_EFLAGS(pEFlags);
16866 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
16867 IEM_MC_IF_LOCAL_IS_Z(rc) {
16868 IEM_MC_ADVANCE_RIP();
16869 } IEM_MC_ELSE() {
16870 IEM_MC_RAISE_DIVIDE_ERROR();
16871 } IEM_MC_ENDIF();
16872
16873 IEM_MC_END();
16874 return VINF_SUCCESS;
16875 }
16876
16877 case IEMMODE_32BIT:
16878 {
16879 IEMOP_HLP_NO_LOCK_PREFIX();
16880 IEM_MC_BEGIN(4, 2);
16881 IEM_MC_ARG(uint32_t *, pu32AX, 0);
16882 IEM_MC_ARG(uint32_t *, pu32DX, 1);
16883 IEM_MC_ARG(uint32_t, u32Value, 2);
16884 IEM_MC_ARG(uint32_t *, pEFlags, 3);
16885 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
16886 IEM_MC_LOCAL(int32_t, rc);
16887
16888 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
16889 IEM_MC_FETCH_MEM_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
16890 IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
16891 IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
16892 IEM_MC_REF_EFLAGS(pEFlags);
16893 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
16894 IEM_MC_IF_LOCAL_IS_Z(rc) {
16895 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
16896 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
16897 IEM_MC_ADVANCE_RIP();
16898 } IEM_MC_ELSE() {
16899 IEM_MC_RAISE_DIVIDE_ERROR();
16900 } IEM_MC_ENDIF();
16901
16902 IEM_MC_END();
16903 return VINF_SUCCESS;
16904 }
16905
16906 case IEMMODE_64BIT:
16907 {
16908 IEMOP_HLP_NO_LOCK_PREFIX();
16909 IEM_MC_BEGIN(4, 2);
16910 IEM_MC_ARG(uint64_t *, pu64AX, 0);
16911 IEM_MC_ARG(uint64_t *, pu64DX, 1);
16912 IEM_MC_ARG(uint64_t, u64Value, 2);
16913 IEM_MC_ARG(uint32_t *, pEFlags, 3);
16914 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
16915 IEM_MC_LOCAL(int32_t, rc);
16916
16917 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
16918 IEM_MC_FETCH_MEM_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
16919 IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
16920 IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
16921 IEM_MC_REF_EFLAGS(pEFlags);
16922 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
16923 IEM_MC_IF_LOCAL_IS_Z(rc) {
16924 IEM_MC_ADVANCE_RIP();
16925 } IEM_MC_ELSE() {
16926 IEM_MC_RAISE_DIVIDE_ERROR();
16927 } IEM_MC_ENDIF();
16928
16929 IEM_MC_END();
16930 return VINF_SUCCESS;
16931 }
16932
16933 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16934 }
16935 }
16936}
16937
/** Opcode 0xf6. */
FNIEMOP_DEF(iemOp_Grp3_Eb)
{
    /*
     * Group 3, byte operand forms: dispatch on ModRM.reg to
     * TEST/NOT/NEG/MUL/IMUL/DIV/IDIV with an Eb operand.
     */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
        case 1:
            /* /1 is officially undefined; raise #UD here. */
/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2:
            IEMOP_MNEMONIC("not Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_not);
        case 3:
            IEMOP_MNEMONIC("neg Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_neg);
        case 4:
            IEMOP_MNEMONIC("mul Eb");
            /* SF/ZF/AF/PF are undefined after MUL; tell the verifier to ignore them. */
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_mul_u8);
        case 5:
            IEMOP_MNEMONIC("imul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_imul_u8);
        case 6:
            IEMOP_MNEMONIC("div Eb");
            /* All arithmetic flags are undefined after DIV/IDIV. */
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_div_u8);
        case 7:
            IEMOP_MNEMONIC("idiv Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_idiv_u8);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16974
16975
/** Opcode 0xf7. */
FNIEMOP_DEF(iemOp_Grp3_Ev)
{
    /*
     * Group 3, word/dword/qword operand forms: dispatch on ModRM.reg to
     * TEST/NOT/NEG/MUL/IMUL/DIV/IDIV with an Ev operand.
     */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
        case 1:
            /* /1 is officially undefined; raise #UD here. */
/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2:
            IEMOP_MNEMONIC("not Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_not);
        case 3:
            IEMOP_MNEMONIC("neg Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_neg);
        case 4:
            IEMOP_MNEMONIC("mul Ev");
            /* SF/ZF/AF/PF are undefined after MUL; tell the verifier to ignore them. */
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_mul);
        case 5:
            IEMOP_MNEMONIC("imul Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_imul);
        case 6:
            IEMOP_MNEMONIC("div Ev");
            /* All arithmetic flags are undefined after DIV/IDIV. */
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_div);
        case 7:
            IEMOP_MNEMONIC("idiv Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_idiv);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
17012
17013
/** Opcode 0xf8. */
FNIEMOP_DEF(iemOp_clc)
{
    /* CLC - clear the carry flag; no other state is touched. */
    IEMOP_MNEMONIC("clc");
    IEMOP_HLP_NO_LOCK_PREFIX();     /* LOCK prefix is not allowed here. */
    IEM_MC_BEGIN(0, 0);
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
17025
17026
/** Opcode 0xf9. */
FNIEMOP_DEF(iemOp_stc)
{
    /* STC - set the carry flag; no other state is touched. */
    IEMOP_MNEMONIC("stc");
    IEMOP_HLP_NO_LOCK_PREFIX();     /* LOCK prefix is not allowed here. */
    IEM_MC_BEGIN(0, 0);
    IEM_MC_SET_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
17038
17039
/** Opcode 0xfa. */
FNIEMOP_DEF(iemOp_cli)
{
    /* CLI - privilege/VME checks are non-trivial, so defer to the C implementation. */
    IEMOP_MNEMONIC("cli");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cli);
}
17047
17048
/** Opcode 0xfb. */
FNIEMOP_DEF(iemOp_sti)
{
    /* STI - privilege checks and the interrupt shadow are handled in the C implementation. */
    IEMOP_MNEMONIC("sti");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sti);
}
17055
17056
/** Opcode 0xfc. */
FNIEMOP_DEF(iemOp_cld)
{
    /* CLD - clear the direction flag (string ops count upwards). */
    IEMOP_MNEMONIC("cld");
    IEMOP_HLP_NO_LOCK_PREFIX();     /* LOCK prefix is not allowed here. */
    IEM_MC_BEGIN(0, 0);
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
17068
17069
/** Opcode 0xfd. */
FNIEMOP_DEF(iemOp_std)
{
    /* STD - set the direction flag (string ops count downwards). */
    IEMOP_MNEMONIC("std");
    IEMOP_HLP_NO_LOCK_PREFIX();     /* LOCK prefix is not allowed here. */
    IEM_MC_BEGIN(0, 0);
    IEM_MC_SET_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
17081
17082
17083/** Opcode 0xfe. */
17084FNIEMOP_DEF(iemOp_Grp4)
17085{
17086 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
17087 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
17088 {
17089 case 0:
17090 IEMOP_MNEMONIC("inc Ev");
17091 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_inc);
17092 case 1:
17093 IEMOP_MNEMONIC("dec Ev");
17094 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_dec);
17095 default:
17096 IEMOP_MNEMONIC("grp4-ud");
17097 return IEMOP_RAISE_INVALID_OPCODE();
17098 }
17099}
17100
17101
/**
 * Opcode 0xff /2 - near indirect call through a register or memory operand.
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_calln_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC("calln Ev");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo Too early? */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();      /* in 64-bit mode the default operand size is 64-bit */

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* The new RIP is taken from a register. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from a memory location. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEM_MC_FETCH_MEM_U16(u16Target, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEM_MC_FETCH_MEM_U32(u32Target, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEM_MC_FETCH_MEM_U64(u64Target, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
17183
17184typedef IEM_CIMPL_DECL_TYPE_3(FNIEMCIMPLFARBRANCH, uint16_t, uSel, uint64_t, offSeg, IEMMODE, enmOpSize);
17185
17186FNIEMOP_DEF_2(iemOpHlp_Grp5_far_Ep, uint8_t, bRm, FNIEMCIMPLFARBRANCH *, pfnCImpl)
17187{
17188 /* Registers? How?? */
17189 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
17190 return IEMOP_RAISE_INVALID_OPCODE(); /* callf eax is not legal */
17191
17192 /* Far pointer loaded from memory. */
17193 switch (pIemCpu->enmEffOpSize)
17194 {
17195 case IEMMODE_16BIT:
17196 IEM_MC_BEGIN(3, 1);
17197 IEM_MC_ARG(uint16_t, u16Sel, 0);
17198 IEM_MC_ARG(uint16_t, offSeg, 1);
17199 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
17200 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17201 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17202 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17203 IEM_MC_FETCH_MEM_U16(offSeg, pIemCpu->iEffSeg, GCPtrEffSrc);
17204 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc, 2);
17205 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
17206 IEM_MC_END();
17207 return VINF_SUCCESS;
17208
17209 case IEMMODE_64BIT:
17210 /** @todo testcase: AMD does not seem to believe in the case (see bs-cpu-xcpt-1)
17211 * and will apparently ignore REX.W, at least for the jmp far qword [rsp]
17212 * and call far qword [rsp] encodings. */
17213 if (!IEM_IS_GUEST_CPU_AMD(pIemCpu))
17214 {
17215 IEM_MC_BEGIN(3, 1);
17216 IEM_MC_ARG(uint16_t, u16Sel, 0);
17217 IEM_MC_ARG(uint64_t, offSeg, 1);
17218 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
17219 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17220 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17221 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17222 IEM_MC_FETCH_MEM_U64(offSeg, pIemCpu->iEffSeg, GCPtrEffSrc);
17223 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc, 8);
17224 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
17225 IEM_MC_END();
17226 return VINF_SUCCESS;
17227 }
17228 /* AMD falls thru. */
17229
17230 case IEMMODE_32BIT:
17231 IEM_MC_BEGIN(3, 1);
17232 IEM_MC_ARG(uint16_t, u16Sel, 0);
17233 IEM_MC_ARG(uint32_t, offSeg, 1);
17234 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_32BIT, 2);
17235 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17236 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17237 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17238 IEM_MC_FETCH_MEM_U32(offSeg, pIemCpu->iEffSeg, GCPtrEffSrc);
17239 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc, 4);
17240 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
17241 IEM_MC_END();
17242 return VINF_SUCCESS;
17243
17244 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17245 }
17246}
17247
17248
/**
 * Opcode 0xff /3 - far indirect call; the far pointer comes from memory.
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_callf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC("callf Ep");
    /* Shares the memory-operand decoding with jmpf (0xff /5). */
    return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_callf);
}
17258
17259
/**
 * Opcode 0xff /4 - near indirect jump through a register or memory operand.
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpn_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC("jmpn Ev");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo Too early? */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();      /* in 64-bit mode the default operand size is 64-bit */

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* The new RIP is taken from a register. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_SET_RIP_U16(u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_SET_RIP_U32(u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_SET_RIP_U64(u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from a memory location. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEM_MC_FETCH_MEM_U16(u16Target, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U16(u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEM_MC_FETCH_MEM_U32(u32Target, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U32(u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEM_MC_FETCH_MEM_U64(u64Target, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U64(u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
17341
17342
/**
 * Opcode 0xff /5 - far indirect jump; the far pointer comes from memory.
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC("jmpf Ep");
    /* Shares the memory-operand decoding with callf (0xff /3). */
    return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_FarJmp);
}
17352
17353
/**
 * Opcode 0xff /6 - push a word/dword/qword register or memory operand.
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_push_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC("push Ev");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo Too early? */

    /* Registers are handled by a common worker. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(iemOpCommonPushGReg, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);

    /* Memory we do here. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();      /* in 64-bit mode the default operand size is 64-bit */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint16_t, u16Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEM_MC_FETCH_MEM_U16(u16Src, pIemCpu->iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U16(u16Src);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEM_MC_FETCH_MEM_U32(u32Src, pIemCpu->iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U32(u32Src);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEM_MC_FETCH_MEM_U64(u64Src, pIemCpu->iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U64(u64Src);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
17407
17408
/** Opcode 0xff. */
FNIEMOP_DEF(iemOp_Grp5)
{
    /*
     * Group 5: dispatch on ModRM.reg to INC/DEC/CALL/CALLF/JMP/JMPF/PUSH;
     * /7 is invalid and raises #UD.
     */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            IEMOP_MNEMONIC("inc Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_inc);
        case 1:
            IEMOP_MNEMONIC("dec Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_dec);
        case 2:
            return FNIEMOP_CALL_1(iemOp_Grp5_calln_Ev, bRm);
        case 3:
            return FNIEMOP_CALL_1(iemOp_Grp5_callf_Ep, bRm);
        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp5_jmpn_Ev, bRm);
        case 5:
            return FNIEMOP_CALL_1(iemOp_Grp5_jmpf_Ep, bRm);
        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp5_push_Ev, bRm);
        case 7:
            IEMOP_MNEMONIC("grp5-ud");
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    AssertFailedReturn(VERR_IEM_IPE_3);     /* all 8 /reg values are handled above */
}
17437
17438
17439
/**
 * The one-byte opcode decoder function table, indexed by the opcode byte.
 * Declared extern near the top of this file so it can be forward referenced.
 */
const PFNIEMOP g_apfnOneByteMap[256] =
{
    /* 0x00 */  iemOp_add_Eb_Gb,        iemOp_add_Ev_Gv,        iemOp_add_Gb_Eb,        iemOp_add_Gv_Ev,
    /* 0x04 */  iemOp_add_Al_Ib,        iemOp_add_eAX_Iz,       iemOp_push_ES,          iemOp_pop_ES,
    /* 0x08 */  iemOp_or_Eb_Gb,         iemOp_or_Ev_Gv,         iemOp_or_Gb_Eb,         iemOp_or_Gv_Ev,
    /* 0x0c */  iemOp_or_Al_Ib,         iemOp_or_eAX_Iz,        iemOp_push_CS,          iemOp_2byteEscape,
    /* 0x10 */  iemOp_adc_Eb_Gb,        iemOp_adc_Ev_Gv,        iemOp_adc_Gb_Eb,        iemOp_adc_Gv_Ev,
    /* 0x14 */  iemOp_adc_Al_Ib,        iemOp_adc_eAX_Iz,       iemOp_push_SS,          iemOp_pop_SS,
    /* 0x18 */  iemOp_sbb_Eb_Gb,        iemOp_sbb_Ev_Gv,        iemOp_sbb_Gb_Eb,        iemOp_sbb_Gv_Ev,
    /* 0x1c */  iemOp_sbb_Al_Ib,        iemOp_sbb_eAX_Iz,       iemOp_push_DS,          iemOp_pop_DS,
    /* 0x20 */  iemOp_and_Eb_Gb,        iemOp_and_Ev_Gv,        iemOp_and_Gb_Eb,        iemOp_and_Gv_Ev,
    /* 0x24 */  iemOp_and_Al_Ib,        iemOp_and_eAX_Iz,       iemOp_seg_ES,           iemOp_daa,
    /* 0x28 */  iemOp_sub_Eb_Gb,        iemOp_sub_Ev_Gv,        iemOp_sub_Gb_Eb,        iemOp_sub_Gv_Ev,
    /* 0x2c */  iemOp_sub_Al_Ib,        iemOp_sub_eAX_Iz,       iemOp_seg_CS,           iemOp_das,
    /* 0x30 */  iemOp_xor_Eb_Gb,        iemOp_xor_Ev_Gv,        iemOp_xor_Gb_Eb,        iemOp_xor_Gv_Ev,
    /* 0x34 */  iemOp_xor_Al_Ib,        iemOp_xor_eAX_Iz,       iemOp_seg_SS,           iemOp_aaa,
    /* 0x38 */  iemOp_cmp_Eb_Gb,        iemOp_cmp_Ev_Gv,        iemOp_cmp_Gb_Eb,        iemOp_cmp_Gv_Ev,
    /* 0x3c */  iemOp_cmp_Al_Ib,        iemOp_cmp_eAX_Iz,       iemOp_seg_DS,           iemOp_aas,
    /* 0x40 */  iemOp_inc_eAX,          iemOp_inc_eCX,          iemOp_inc_eDX,          iemOp_inc_eBX,
    /* 0x44 */  iemOp_inc_eSP,          iemOp_inc_eBP,          iemOp_inc_eSI,          iemOp_inc_eDI,
    /* 0x48 */  iemOp_dec_eAX,          iemOp_dec_eCX,          iemOp_dec_eDX,          iemOp_dec_eBX,
    /* 0x4c */  iemOp_dec_eSP,          iemOp_dec_eBP,          iemOp_dec_eSI,          iemOp_dec_eDI,
    /* 0x50 */  iemOp_push_eAX,         iemOp_push_eCX,         iemOp_push_eDX,         iemOp_push_eBX,
    /* 0x54 */  iemOp_push_eSP,         iemOp_push_eBP,         iemOp_push_eSI,         iemOp_push_eDI,
    /* 0x58 */  iemOp_pop_eAX,          iemOp_pop_eCX,          iemOp_pop_eDX,          iemOp_pop_eBX,
    /* 0x5c */  iemOp_pop_eSP,          iemOp_pop_eBP,          iemOp_pop_eSI,          iemOp_pop_eDI,
    /* 0x60 */  iemOp_pusha,            iemOp_popa,             iemOp_bound_Gv_Ma_evex, iemOp_arpl_Ew_Gw_movsx_Gv_Ev,
    /* 0x64 */  iemOp_seg_FS,           iemOp_seg_GS,           iemOp_op_size,          iemOp_addr_size,
    /* 0x68 */  iemOp_push_Iz,          iemOp_imul_Gv_Ev_Iz,    iemOp_push_Ib,          iemOp_imul_Gv_Ev_Ib,
    /* 0x6c */  iemOp_insb_Yb_DX,       iemOp_inswd_Yv_DX,      iemOp_outsb_Yb_DX,      iemOp_outswd_Yv_DX,
    /* 0x70 */  iemOp_jo_Jb,            iemOp_jno_Jb,           iemOp_jc_Jb,            iemOp_jnc_Jb,
    /* 0x74 */  iemOp_je_Jb,            iemOp_jne_Jb,           iemOp_jbe_Jb,           iemOp_jnbe_Jb,
    /* 0x78 */  iemOp_js_Jb,            iemOp_jns_Jb,           iemOp_jp_Jb,            iemOp_jnp_Jb,
    /* 0x7c */  iemOp_jl_Jb,            iemOp_jnl_Jb,           iemOp_jle_Jb,           iemOp_jnle_Jb,
    /* 0x80 */  iemOp_Grp1_Eb_Ib_80,    iemOp_Grp1_Ev_Iz,       iemOp_Grp1_Eb_Ib_82,    iemOp_Grp1_Ev_Ib,
    /* 0x84 */  iemOp_test_Eb_Gb,       iemOp_test_Ev_Gv,       iemOp_xchg_Eb_Gb,       iemOp_xchg_Ev_Gv,
    /* 0x88 */  iemOp_mov_Eb_Gb,        iemOp_mov_Ev_Gv,        iemOp_mov_Gb_Eb,        iemOp_mov_Gv_Ev,
    /* 0x8c */  iemOp_mov_Ev_Sw,        iemOp_lea_Gv_M,         iemOp_mov_Sw_Ev,        iemOp_Grp1A,
    /* 0x90 */  iemOp_nop,              iemOp_xchg_eCX_eAX,     iemOp_xchg_eDX_eAX,     iemOp_xchg_eBX_eAX,
    /* 0x94 */  iemOp_xchg_eSP_eAX,     iemOp_xchg_eBP_eAX,     iemOp_xchg_eSI_eAX,     iemOp_xchg_eDI_eAX,
    /* 0x98 */  iemOp_cbw,              iemOp_cwd,              iemOp_call_Ap,          iemOp_wait,
    /* 0x9c */  iemOp_pushf_Fv,         iemOp_popf_Fv,          iemOp_sahf,             iemOp_lahf,
    /* 0xa0 */  iemOp_mov_Al_Ob,        iemOp_mov_rAX_Ov,       iemOp_mov_Ob_AL,        iemOp_mov_Ov_rAX,
    /* 0xa4 */  iemOp_movsb_Xb_Yb,      iemOp_movswd_Xv_Yv,     iemOp_cmpsb_Xb_Yb,      iemOp_cmpswd_Xv_Yv,
    /* 0xa8 */  iemOp_test_AL_Ib,       iemOp_test_eAX_Iz,      iemOp_stosb_Yb_AL,      iemOp_stoswd_Yv_eAX,
    /* 0xac */  iemOp_lodsb_AL_Xb,      iemOp_lodswd_eAX_Xv,    iemOp_scasb_AL_Xb,      iemOp_scaswd_eAX_Xv,
    /* 0xb0 */  iemOp_mov_AL_Ib,        iemOp_CL_Ib,            iemOp_DL_Ib,            iemOp_BL_Ib,
    /* 0xb4 */  iemOp_mov_AH_Ib,        iemOp_CH_Ib,            iemOp_DH_Ib,            iemOp_BH_Ib,
    /* 0xb8 */  iemOp_eAX_Iv,           iemOp_eCX_Iv,           iemOp_eDX_Iv,           iemOp_eBX_Iv,
    /* 0xbc */  iemOp_eSP_Iv,           iemOp_eBP_Iv,           iemOp_eSI_Iv,           iemOp_eDI_Iv,
    /* 0xc0 */  iemOp_Grp2_Eb_Ib,       iemOp_Grp2_Ev_Ib,       iemOp_retn_Iw,          iemOp_retn,
    /* 0xc4 */  iemOp_les_Gv_Mp_vex2,   iemOp_lds_Gv_Mp_vex3,   iemOp_Grp11_Eb_Ib,      iemOp_Grp11_Ev_Iz,
    /* 0xc8 */  iemOp_enter_Iw_Ib,      iemOp_leave,            iemOp_retf_Iw,          iemOp_retf,
    /* 0xcc */  iemOp_int_3,            iemOp_int_Ib,           iemOp_into,             iemOp_iret,
    /* 0xd0 */  iemOp_Grp2_Eb_1,        iemOp_Grp2_Ev_1,        iemOp_Grp2_Eb_CL,       iemOp_Grp2_Ev_CL,
    /* 0xd4 */  iemOp_aam_Ib,           iemOp_aad_Ib,           iemOp_salc,             iemOp_xlat,
    /* 0xd8 */  iemOp_EscF0,            iemOp_EscF1,            iemOp_EscF2,            iemOp_EscF3,
    /* 0xdc */  iemOp_EscF4,            iemOp_EscF5,            iemOp_EscF6,            iemOp_EscF7,
    /* 0xe0 */  iemOp_loopne_Jb,        iemOp_loope_Jb,         iemOp_loop_Jb,          iemOp_jecxz_Jb,
    /* 0xe4 */  iemOp_in_AL_Ib,         iemOp_in_eAX_Ib,        iemOp_out_Ib_AL,        iemOp_out_Ib_eAX,
    /* 0xe8 */  iemOp_call_Jv,          iemOp_jmp_Jv,           iemOp_jmp_Ap,           iemOp_jmp_Jb,
    /* 0xec */  iemOp_in_AL_DX,         iemOp_eAX_DX,           iemOp_out_DX_AL,        iemOp_out_DX_eAX,
    /* 0xf0 */  iemOp_lock,             iemOp_int_1,            iemOp_repne,            iemOp_repe,
    /* 0xf4 */  iemOp_hlt,              iemOp_cmc,              iemOp_Grp3_Eb,          iemOp_Grp3_Ev,
    /* 0xf8 */  iemOp_clc,              iemOp_stc,              iemOp_cli,              iemOp_sti,
    /* 0xfc */  iemOp_cld,              iemOp_std,              iemOp_Grp4,             iemOp_Grp5,
};
17507
17508
17509/** @} */
17510
Note: See TracBrowser for help on using the repository browser.

© 2025 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette