VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructions.cpp.h@ 61639

Last change on this file since 61639 was 61639, checked in by vboxsync, 9 years ago

IEM: Temporarily disabled movlps and movups.

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 610.0 KB
Line 
1/* $Id: IEMAllInstructions.cpp.h 61639 2016-06-09 19:39:43Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 */
5
6/*
7 * Copyright (C) 2011-2015 Oracle Corporation
8 *
9 * This file is part of VirtualBox Open Source Edition (OSE), as
10 * available from http://www.virtualbox.org. This file is free software;
11 * you can redistribute it and/or modify it under the terms of the GNU
12 * General Public License (GPL) as published by the Free Software
13 * Foundation, in version 2 as it comes in the "COPYING" file of the
14 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
15 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
16 */
17
18
19/*******************************************************************************
20* Global Variables *
21*******************************************************************************/
22extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */
23
24
/**
 * Common worker for instructions like ADD, AND, OR, ++ with a byte
 * memory/register as the destination (Eb,Gb forms).
 *
 * Decodes the ModR/M byte and emits either the register-destination or the
 * memory-destination microcode sequence; the memory form honours the LOCK
 * prefix by dispatching to the locked worker.
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rm_r8, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* LOCK is only valid with a memory destination. */
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0);
        IEM_MC_ARG(uint8_t,    u8Src,   1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         * Note! We're putting the eflags on the stack here so we can commit them
         *       after the memory.
         */
        /* CMP/TEST have no locked worker and only read the destination. */
        uint32_t const fAccess = pImpl->pfnLockedU8 ? IEM_ACCESS_DATA_RW : IEM_ACCESS_DATA_R; /* CMP,TEST */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,  pu8Dst,           0);
        IEM_MC_ARG(uint8_t,    u8Src,            1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_FETCH_EFLAGS(EFlags);
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
85
86
/**
 * Common worker for word/dword/qword instructions like ADD, AND, OR, ++ with
 * memory/register as the destination (Ev,Gv forms).
 *
 * Dispatches on the effective operand size (16/32/64-bit); the memory form
 * honours the LOCK prefix by dispatching to the locked worker.
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rm_rv, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* LOCK is only valid with a memory destination. */
        IEMOP_HLP_NO_LOCK_PREFIX();

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst,  0);
                IEM_MC_ARG(uint16_t,   u16Src,   1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst,  0);
                IEM_MC_ARG(uint32_t,   u32Src,   1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                /* TEST doesn't write the destination, so don't clear the
                   high dword of the 64-bit register for it. */
                if (pImpl != &g_iemAImpl_test)
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst,  0);
                IEM_MC_ARG(uint64_t,   u64Src,   1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);

                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're accessing memory.
         * Note! We're putting the eflags on the stack here so we can commit them
         *       after the memory.
         */
        /* CMP/TEST have no locked worker (checked via the U8 member) and only
           read the destination. */
        uint32_t const fAccess = pImpl->pfnLockedU8 ? IEM_ACCESS_DATA_RW : IEM_ACCESS_DATA_R /* CMP,TEST */;
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst,          0);
                IEM_MC_ARG(uint16_t,   u16Src,           1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst,          0);
                IEM_MC_ARG(uint32_t,   u32Src,           1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst,          0);
                IEM_MC_ARG(uint64_t,   u64Src,           1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
233
234
/**
 * Common worker for byte instructions like ADD, AND, OR, ++ with a register as
 * the destination (Gb,Eb forms).
 *
 * Since the destination is always a register, LOCK is rejected up front and
 * the memory form only needs a read of the source operand.
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_r8_rm, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0);
        IEM_MC_ARG(uint8_t,    u8Src,   1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_FETCH_GREG_U8(u8Src, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_GREG_U8(pu8Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory (source only; destination is a register).
         */
        IEM_MC_BEGIN(3, 1);
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0);
        IEM_MC_ARG(uint8_t,    u8Src,   1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_MEM_U8(u8Src, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U8(pu8Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
286
287
/**
 * Common worker for word/dword/qword instructions like ADD, AND, OR, ++ with a
 * register as the destination (Gv,Ev forms).
 *
 * Dispatches on the effective operand size (16/32/64-bit).  Since the
 * destination is always a register, LOCK is rejected up front and the memory
 * form only needs a read of the source operand.
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rv_rm, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst,  0);
                IEM_MC_ARG(uint16_t,   u16Src,   1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);

                IEM_MC_FETCH_GREG_U16(u16Src, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst,  0);
                IEM_MC_ARG(uint32_t,   u32Src,   1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);

                IEM_MC_FETCH_GREG_U32(u32Src, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U32(pu32Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                /* 32-bit register writes zero the high dword in 64-bit mode. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst,  0);
                IEM_MC_ARG(uint64_t,   u64Src,   1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);

                IEM_MC_FETCH_GREG_U64(u64Src, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're accessing memory (source only; destination is a register).
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst,  0);
                IEM_MC_ARG(uint16_t,   u16Src,   1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U16(u16Src, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst,  0);
                IEM_MC_ARG(uint32_t,   u32Src,   1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U32(u32Src, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U32(pu32Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                /* 32-bit register writes zero the high dword in 64-bit mode. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst,  0);
                IEM_MC_ARG(uint64_t,   u64Src,   1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U64(u64Src, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
415
416
/**
 * Common worker for instructions like ADD, AND, OR, ++ with working on AL with
 * a byte immediate (AL,Ib forms).
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_AL_Ib, PCIEMOPBINSIZES, pImpl)
{
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    /* Register destination, so LOCK is invalid. */
    IEMOP_HLP_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
    IEM_MC_ARG_CONST(uint8_t,   u8Src,/*=*/ u8Imm,  1);
    IEM_MC_ARG(uint32_t *,      pEFlags,            2);

    IEM_MC_REF_GREG_U8(pu8Dst, X86_GREG_xAX);
    IEM_MC_REF_EFLAGS(pEFlags);
    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
441
442
/**
 * Common worker for instructions like ADD, AND, OR, ++ with working on
 * AX/EAX/RAX with a word/dword immediate (rAX,Iz forms).
 *
 * For the 64-bit operand size the immediate is a dword sign-extended to a
 * qword, per the x86 Iz encoding.
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rAX_Iz, PCIEMOPBINSIZES, pImpl)
{
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint16_t *,      pu16Dst,                0);
            IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/ u16Imm,    1);
            IEM_MC_ARG(uint32_t *,      pEFlags,                2);

            IEM_MC_REF_GREG_U16(pu16Dst, X86_GREG_xAX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint32_t *,      pu32Dst,                0);
            IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/ u32Imm,    1);
            IEM_MC_ARG(uint32_t *,      pEFlags,                2);

            IEM_MC_REF_GREG_U32(pu32Dst, X86_GREG_xAX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

            /* TEST doesn't write the destination, so don't clear the high
               dword of RAX for it. */
            if (pImpl != &g_iemAImpl_test)
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            /* Iz: dword immediate, sign-extended to 64 bits. */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint64_t *,      pu64Dst,                0);
            IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/ u64Imm,    1);
            IEM_MC_ARG(uint32_t *,      pEFlags,                2);

            IEM_MC_REF_GREG_U64(pu64Dst, X86_GREG_xAX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
515
516
/** Opcodes 0xf1, 0xd6.
 *  Generic handler for invalid opcodes: raises \#UD. */
FNIEMOP_DEF(iemOp_Invalid)
{
    IEMOP_MNEMONIC("Invalid");
    return IEMOP_RAISE_INVALID_OPCODE();
}
523
524
525
526/** @name ..... opcodes.
527 *
528 * @{
529 */
530
531/** @} */
532
533
534/** @name Two byte opcodes (first byte 0x0f).
535 *
536 * @{
537 */
538
/** Opcode 0x0f 0x00 /0.
 *  SLDT - store the LDTR selector to a register (Rv, size-dependent) or to a
 *  16-bit memory operand.  Requires 286+ and protected mode. */
FNIEMOP_DEF_1(iemOp_Grp6_sldt, uint8_t, bRm)
{
    IEMOP_MNEMONIC("sldt Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        /* Register destination: width follows the effective operand size. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Ldtr);
                IEM_MC_FETCH_LDTR_U16(u16Ldtr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Ldtr);
                IEM_MC_FETCH_LDTR_U32(u32Ldtr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Ldtr);
                IEM_MC_FETCH_LDTR_U64(u64Ldtr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory destination is always 16-bit regardless of operand size. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Ldtr);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_LDTR_U16(u16Ldtr);
        IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Ldtr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
595
596
/** Opcode 0x0f 0x00 /1.
 *  STR - store the TR selector to a register (Rv, size-dependent) or to a
 *  16-bit memory operand.  Requires 286+ and protected mode. */
FNIEMOP_DEF_1(iemOp_Grp6_str, uint8_t, bRm)
{
    IEMOP_MNEMONIC("str Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        /* Register destination: width follows the effective operand size. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Tr);
                IEM_MC_FETCH_TR_U16(u16Tr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Tr);
                IEM_MC_FETCH_TR_U32(u32Tr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tr);
                IEM_MC_FETCH_TR_U64(u64Tr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory destination is always 16-bit regardless of operand size. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Tr);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_TR_U16(u16Tr);
        IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Tr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
653
654
/** Opcode 0x0f 0x00 /2.
 *  LLDT - load the LDTR from a 16-bit register or memory selector; the
 *  heavy lifting (privilege checks etc.) is done by iemCImpl_lldt. */
FNIEMOP_DEF_1(iemOp_Grp6_lldt, uint8_t, bRm)
{
    IEMOP_MNEMONIC("lldt Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
685
686
/** Opcode 0x0f 0x00 /3.
 *  LTR - load the task register from a 16-bit register or memory selector;
 *  the heavy lifting (privilege checks etc.) is done by iemCImpl_ltr. */
FNIEMOP_DEF_1(iemOp_Grp6_ltr, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ltr Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
717
718
719/** Opcode 0x0f 0x00 /3. */
720FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
721{
722 IEMOP_HLP_MIN_286();
723 IEMOP_HLP_NO_REAL_OR_V86_MODE();
724
725 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
726 {
727 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
728 IEM_MC_BEGIN(2, 0);
729 IEM_MC_ARG(uint16_t, u16Sel, 0);
730 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
731 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
732 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
733 IEM_MC_END();
734 }
735 else
736 {
737 IEM_MC_BEGIN(2, 1);
738 IEM_MC_ARG(uint16_t, u16Sel, 0);
739 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
740 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
741 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
742 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
743 IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc);
744 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
745 IEM_MC_END();
746 }
747 return VINF_SUCCESS;
748}
749
750
/** Opcode 0x0f 0x00 /4.
 *  VERR - verify a segment for reading; forwards to the common VerX worker
 *  with fWrite=false. */
FNIEMOP_DEF_1(iemOp_Grp6_verr, uint8_t, bRm)
{
    IEMOP_MNEMONIC("verr Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
}
758
759
760/** Opcode 0x0f 0x00 /5. */
761FNIEMOP_DEF_1(iemOp_Grp6_verw, uint8_t, bRm)
762{
763 IEMOP_MNEMONIC("verr Ew");
764 IEMOP_HLP_MIN_286();
765 return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
766}
767
768
769/** Opcode 0x0f 0x00. */
770FNIEMOP_DEF(iemOp_Grp6)
771{
772 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
773 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
774 {
775 case 0: return FNIEMOP_CALL_1(iemOp_Grp6_sldt, bRm);
776 case 1: return FNIEMOP_CALL_1(iemOp_Grp6_str, bRm);
777 case 2: return FNIEMOP_CALL_1(iemOp_Grp6_lldt, bRm);
778 case 3: return FNIEMOP_CALL_1(iemOp_Grp6_ltr, bRm);
779 case 4: return FNIEMOP_CALL_1(iemOp_Grp6_verr, bRm);
780 case 5: return FNIEMOP_CALL_1(iemOp_Grp6_verw, bRm);
781 case 6: return IEMOP_RAISE_INVALID_OPCODE();
782 case 7: return IEMOP_RAISE_INVALID_OPCODE();
783 IEM_NOT_REACHED_DEFAULT_CASE_RET();
784 }
785
786}
787
788
/** Opcode 0x0f 0x01 /0 (memory form).
 *  SGDT - store the GDTR to memory; the actual store is done by
 *  iemCImpl_sgdt. */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC("sgdt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
805
806
/** Opcode 0x0f 0x01 /0 (mod=3, i.e. 0x0f 0x01 0xc1).
 *  VMCALL - not implemented; raises \#UD. */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
813
814
/** Opcode 0x0f 0x01 /0 (mod=3, i.e. 0x0f 0x01 0xc2).
 *  VMLAUNCH - not implemented; raises \#UD. */
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
821
822
/** Opcode 0x0f 0x01 /0 (mod=3, i.e. 0x0f 0x01 0xc3).
 *  VMRESUME - not implemented; raises \#UD. */
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
829
830
/** Opcode 0x0f 0x01 /0 (mod=3, i.e. 0x0f 0x01 0xc4).
 *  VMXOFF - not implemented; raises \#UD. */
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
837
838
/** Opcode 0x0f 0x01 /1 (memory form).
 *  SIDT - store the IDTR to memory; the actual store is done by
 *  iemCImpl_sidt. */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC("sidt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
855
856
/** Opcode 0x0f 0x01 /1 (mod=3, 0xc8).
 *  MONITOR - deferred to iemCImpl_monitor with the effective segment. */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    IEMOP_MNEMONIC("monitor");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pIemCpu->iEffSeg);
}
864
865
/** Opcode 0x0f 0x01 /1 (mod=3, 0xc9).
 *  MWAIT - deferred to iemCImpl_mwait. */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    IEMOP_MNEMONIC("mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
}
873
874
/** Opcode 0x0f 0x01 /2 (memory form).
 *  LGDT - load the GDTR from memory; the actual load and checks are done by
 *  iemCImpl_lgdt.
 *  NOTE(review): unlike sgdt/sidt this omits IEMOP_HLP_MIN_286() — confirm
 *  whether that is intentional. */
FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC("lgdt");
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pIemCpu->enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}
891
892
/** Opcode 0x0f 0x01 0xd0.
 *  XGETBV - raises \#UD unless the guest CPU profile reports XSAVE support;
 *  otherwise deferred to iemCImpl_xgetbv. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    IEMOP_MNEMONIC("xgetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xgetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}
904
905
/** Opcode 0x0f 0x01 0xd1.
 *  XSETBV - raises \#UD unless the guest CPU profile reports XSAVE support;
 *  otherwise deferred to iemCImpl_xsetbv. */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    IEMOP_MNEMONIC("xsetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xsetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}
917
918
919/** Opcode 0x0f 0x01 /3. */
920FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
921{
922 IEMMODE enmEffOpSize = pIemCpu->enmCpuMode == IEMMODE_64BIT
923 ? IEMMODE_64BIT
924 : pIemCpu->enmEffOpSize;
925 IEM_MC_BEGIN(3, 1);
926 IEM_MC_ARG(uint8_t, iEffSeg, 0);
927 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
928 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
929 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
930 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
931 IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
932 IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
933 IEM_MC_END();
934 return VINF_SUCCESS;
935}
936
937
/*
 * AMD SVM instructions (0x0f 0x01 0xd8..0xdf) — all currently unimplemented
 * stubs that raise #UD.
 */

/** Opcode 0x0f 0x01 0xd8. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmmcall);

/** Opcode 0x0f 0x01 0xda. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);

/** Opcode 0x0f 0x01 0xdb. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);

/** Opcode 0x0f 0x01 0xdc. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);

/** Opcode 0x0f 0x01 0xdd. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);

/** Opcode 0x0f 0x01 0xde. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);

/** Opcode 0x0f 0x01 0xdf. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
961
962/** Opcode 0x0f 0x01 /4. */
963FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
964{
965 IEMOP_MNEMONIC("smsw");
966 IEMOP_HLP_MIN_286();
967 IEMOP_HLP_NO_LOCK_PREFIX();
968 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
969 {
970 switch (pIemCpu->enmEffOpSize)
971 {
972 case IEMMODE_16BIT:
973 IEM_MC_BEGIN(0, 1);
974 IEM_MC_LOCAL(uint16_t, u16Tmp);
975 IEM_MC_FETCH_CR0_U16(u16Tmp);
976 if (IEM_GET_TARGET_CPU(pIemCpu) > IEMTARGETCPU_386)
977 { /* likely */ }
978 else if (IEM_GET_TARGET_CPU(pIemCpu) >= IEMTARGETCPU_386)
979 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
980 else
981 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
982 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Tmp);
983 IEM_MC_ADVANCE_RIP();
984 IEM_MC_END();
985 return VINF_SUCCESS;
986
987 case IEMMODE_32BIT:
988 IEM_MC_BEGIN(0, 1);
989 IEM_MC_LOCAL(uint32_t, u32Tmp);
990 IEM_MC_FETCH_CR0_U32(u32Tmp);
991 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Tmp);
992 IEM_MC_ADVANCE_RIP();
993 IEM_MC_END();
994 return VINF_SUCCESS;
995
996 case IEMMODE_64BIT:
997 IEM_MC_BEGIN(0, 1);
998 IEM_MC_LOCAL(uint64_t, u64Tmp);
999 IEM_MC_FETCH_CR0_U64(u64Tmp);
1000 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Tmp);
1001 IEM_MC_ADVANCE_RIP();
1002 IEM_MC_END();
1003 return VINF_SUCCESS;
1004
1005 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1006 }
1007 }
1008 else
1009 {
1010 /* Ignore operand size here, memory refs are always 16-bit. */
1011 IEM_MC_BEGIN(0, 2);
1012 IEM_MC_LOCAL(uint16_t, u16Tmp);
1013 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
1014 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
1015 IEM_MC_FETCH_CR0_U16(u16Tmp);
1016 if (IEM_GET_TARGET_CPU(pIemCpu) > IEMTARGETCPU_386)
1017 { /* likely */ }
1018 else if (pIemCpu->uTargetCpu >= IEMTARGETCPU_386)
1019 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
1020 else
1021 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
1022 IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Tmp);
1023 IEM_MC_ADVANCE_RIP();
1024 IEM_MC_END();
1025 return VINF_SUCCESS;
1026 }
1027}
1028
1029
1030/** Opcode 0x0f 0x01 /6. */
1031FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
1032{
1033 /* The operand size is effectively ignored, all is 16-bit and only the
1034 lower 3-bits are used. */
1035 IEMOP_MNEMONIC("lmsw");
1036 IEMOP_HLP_MIN_286();
1037 IEMOP_HLP_NO_LOCK_PREFIX();
1038 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1039 {
1040 IEM_MC_BEGIN(1, 0);
1041 IEM_MC_ARG(uint16_t, u16Tmp, 0);
1042 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
1043 IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
1044 IEM_MC_END();
1045 }
1046 else
1047 {
1048 IEM_MC_BEGIN(1, 1);
1049 IEM_MC_ARG(uint16_t, u16Tmp, 0);
1050 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
1051 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
1052 IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
1053 IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
1054 IEM_MC_END();
1055 }
1056 return VINF_SUCCESS;
1057}
1058
1059
/** Opcode 0x0f 0x01 /7 (memory form). */
FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
{
    /* INVLPG - decode the effective address and hand it to the C
       implementation (486+ instruction). */
    IEMOP_MNEMONIC("invlpg");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(1, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}
1073
1074
/** Opcode 0x0f 0x01 /7, mod=3 rm=0 (see iemOp_Grp7 dispatch). */
FNIEMOP_DEF(iemOp_Grp7_swapgs)
{
    /* SWAPGS - only encodable in 64-bit mode; deferred to the C impl. */
    IEMOP_MNEMONIC("swapgs");
    IEMOP_HLP_ONLY_64BIT();
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
}
1083
1084
/** Opcode 0x0f 0x01 /7, mod=3 rm=1 (see iemOp_Grp7 dispatch). */
FNIEMOP_DEF(iemOp_Grp7_rdtscp)
{
    /* RDTSCP - not implemented; report the stub and bail. */
    NOREF(pIemCpu);
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}
1092
1093
/** Opcode 0x0f 0x01. */
FNIEMOP_DEF(iemOp_Grp7)
{
    /*
     * Group 7 dispatcher: routes on the ModR/M reg field; for reg values
     * with register-form special encodings (mod=3) it sub-dispatches on
     * the rm field.
     */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: /* SGDT (mem) / VMX instructions (reg). */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_sgdt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 1: /* SIDT (mem) / MONITOR, MWAIT (reg). */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_sidt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 2: /* LGDT (mem) / XGETBV, XSETBV (reg). */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_lgdt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 3: /* LIDT (mem) / AMD-V instructions (reg, all 8 rm values taken). */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_lidt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
                case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
                case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
                case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }

        case 4: /* SMSW - both register and memory forms handled by the worker. */
            return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);

        case 5: /* Unassigned. */
            return IEMOP_RAISE_INVALID_OPCODE();

        case 6: /* LMSW - both register and memory forms handled by the worker. */
            return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);

        case 7: /* INVLPG (mem) / SWAPGS, RDTSCP (reg). */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_invlpg, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
1170
/** Common worker for opcodes 0x0f 0x02 (lar) and 0x0f 0x03 (lsl). */
FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
{
    /*
     * Shared decoder for LAR and LSL: fIsLar selects which operation the
     * C implementation performs.  Not valid in real or V8086 mode.  Note
     * the 32/64-bit paths share the 64-bit destination register path.
     */
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register source. */
        IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 3);

                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_CIMPL_4(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, pEFlags, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 3);

                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_CIMPL_4(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, pEFlags, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory source: the selector word is fetched from memory. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);

                IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_CIMPL_4(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, pEFlags, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
/** @todo testcase: make sure it's a 16-bit read. */

                IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_CIMPL_4(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, pEFlags, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
1272
1273
1274
/** Opcode 0x0f 0x02. */
FNIEMOP_DEF(iemOp_lar_Gv_Ew)
{
    /* LAR - load access rights; shares a worker with LSL (fIsLar=true). */
    IEMOP_MNEMONIC("lar Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
}


/** Opcode 0x0f 0x03. */
FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
{
    /* LSL - load segment limit; shares a worker with LAR (fIsLar=false). */
    IEMOP_MNEMONIC("lsl Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
}
1289
1290
/** Opcode 0x0f 0x05. */
FNIEMOP_DEF(iemOp_syscall)
{
    /* SYSCALL - fully handled by the C implementation. */
    IEMOP_MNEMONIC("syscall"); /** @todo 286 LOADALL */
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
}


/** Opcode 0x0f 0x06. */
FNIEMOP_DEF(iemOp_clts)
{
    /* CLTS - clear CR0.TS; fully handled by the C implementation. */
    IEMOP_MNEMONIC("clts");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
}


/** Opcode 0x0f 0x07. */
FNIEMOP_DEF(iemOp_sysret)
{
    /* SYSRET - fully handled by the C implementation. */
    IEMOP_MNEMONIC("sysret"); /** @todo 386 LOADALL */
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
}
1316
1317
/** Opcode 0x0f 0x08. */
FNIEMOP_STUB(iemOp_invd);   /* INVD - not implemented yet. */
// IEMOP_HLP_MIN_486();


/** Opcode 0x0f 0x09. */
FNIEMOP_DEF(iemOp_wbinvd)
{
    /* WBINVD - only the CPL-0 privilege check is emulated; the cache
       write-back itself is treated as a NOP. */
    IEMOP_MNEMONIC("wbinvd");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS; /* ignore for now */
}
1335
1336
/** Opcode 0x0f 0x0b. */
FNIEMOP_DEF(iemOp_ud2)
{
    /* UD2 - defined to raise \#UD. */
    IEMOP_MNEMONIC("ud2");
    return IEMOP_RAISE_INVALID_OPCODE();
}
1343
/** Opcode 0x0f 0x0d. */
FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
{
    /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
    if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->f3DNowPrefetch)
    {
        IEMOP_MNEMONIC("GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    /* Register forms are invalid; only memory operands are accepted. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC("GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    IEMOP_HLP_NO_LOCK_PREFIX();
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 2: /* Aliased to /0 for the time being. */
        case 4: /* Aliased to /0 for the time being. */
        case 5: /* Aliased to /0 for the time being. */
        case 6: /* Aliased to /0 for the time being. */
        case 7: /* Aliased to /0 for the time being. */
        case 0: IEMOP_MNEMONIC("prefetch"); break;
        case 1: IEMOP_MNEMONIC("prefetchw"); break;
        case 3: IEMOP_MNEMONIC("prefetchw"); break; /* NOTE(review): same mnemonic as /1 - confirm /3 against AMD docs. */
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

    /* Decode the effective address for correct instruction length, but the
       prefetch hint itself is a NOP here. */
    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    /* Currently a NOP. */
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
1383
1384
/** Opcode 0x0f 0x0e. */
FNIEMOP_STUB(iemOp_femms);  /* FEMMS - not implemented yet. */


/*
 * 3DNow! operations (0x0f 0x0f with an immediate sub-opcode byte) - all
 * unimplemented stubs, dispatched from iemOp_3Dnow below.
 */

/** Opcode 0x0f 0x0f 0x0c. */
FNIEMOP_STUB(iemOp_3Dnow_pi2fw_Pq_Qq);

/** Opcode 0x0f 0x0f 0x0d. */
FNIEMOP_STUB(iemOp_3Dnow_pi2fd_Pq_Qq);

/** Opcode 0x0f 0x0f 0x1c. */
FNIEMOP_STUB(iemOp_3Dnow_pf2fw_Pq_Qq);

/** Opcode 0x0f 0x0f 0x1d. */
FNIEMOP_STUB(iemOp_3Dnow_pf2fd_Pq_Qq);

/** Opcode 0x0f 0x0f 0x8a. */
FNIEMOP_STUB(iemOp_3Dnow_pfnacc_Pq_Qq);

/** Opcode 0x0f 0x0f 0x8e. */
FNIEMOP_STUB(iemOp_3Dnow_pfpnacc_Pq_Qq);

/** Opcode 0x0f 0x0f 0x90. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpge_Pq_Qq);

/** Opcode 0x0f 0x0f 0x94. */
FNIEMOP_STUB(iemOp_3Dnow_pfmin_Pq_Qq);

/** Opcode 0x0f 0x0f 0x96. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcp_Pq_Qq);

/** Opcode 0x0f 0x0f 0x97. */
FNIEMOP_STUB(iemOp_3Dnow_pfrsqrt_Pq_Qq);

/** Opcode 0x0f 0x0f 0x9a. */
FNIEMOP_STUB(iemOp_3Dnow_pfsub_Pq_Qq);

/** Opcode 0x0f 0x0f 0x9e. */
FNIEMOP_STUB(iemOp_3Dnow_pfadd_PQ_Qq);

/** Opcode 0x0f 0x0f 0xa0. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpgt_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa4. */
FNIEMOP_STUB(iemOp_3Dnow_pfmax_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa6. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcpit1_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa7. */
FNIEMOP_STUB(iemOp_3Dnow_pfrsqit1_Pq_Qq);

/** Opcode 0x0f 0x0f 0xaa. */
FNIEMOP_STUB(iemOp_3Dnow_pfsubr_Pq_Qq);

/** Opcode 0x0f 0x0f 0xae. */
FNIEMOP_STUB(iemOp_3Dnow_pfacc_PQ_Qq);

/** Opcode 0x0f 0x0f 0xb0. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpeq_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb4. */
FNIEMOP_STUB(iemOp_3Dnow_pfmul_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb6. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcpit2_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb7. */
FNIEMOP_STUB(iemOp_3Dnow_pmulhrw_Pq_Qq);

/** Opcode 0x0f 0x0f 0xbb. */
FNIEMOP_STUB(iemOp_3Dnow_pswapd_Pq_Qq);

/** Opcode 0x0f 0x0f 0xbf. */
FNIEMOP_STUB(iemOp_3Dnow_pavgusb_PQ_Qq);
1460
1461
/** Opcode 0x0f 0x0f. */
FNIEMOP_DEF(iemOp_3Dnow)
{
    /* 3DNow! escape: \#UD unless the guest CPU reports 3DNow!; otherwise
       dispatch on the trailing sub-opcode byte. */
    if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->f3DNow)
    {
        IEMOP_MNEMONIC("3Dnow");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    /* This is pretty sparse, use switch instead of table. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    switch (b)
    {
        case 0x0c: return FNIEMOP_CALL(iemOp_3Dnow_pi2fw_Pq_Qq);
        case 0x0d: return FNIEMOP_CALL(iemOp_3Dnow_pi2fd_Pq_Qq);
        case 0x1c: return FNIEMOP_CALL(iemOp_3Dnow_pf2fw_Pq_Qq);
        case 0x1d: return FNIEMOP_CALL(iemOp_3Dnow_pf2fd_Pq_Qq);
        case 0x8a: return FNIEMOP_CALL(iemOp_3Dnow_pfnacc_Pq_Qq);
        case 0x8e: return FNIEMOP_CALL(iemOp_3Dnow_pfpnacc_Pq_Qq);
        case 0x90: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpge_Pq_Qq);
        case 0x94: return FNIEMOP_CALL(iemOp_3Dnow_pfmin_Pq_Qq);
        case 0x96: return FNIEMOP_CALL(iemOp_3Dnow_pfrcp_Pq_Qq);
        case 0x97: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqrt_Pq_Qq);
        case 0x9a: return FNIEMOP_CALL(iemOp_3Dnow_pfsub_Pq_Qq);
        case 0x9e: return FNIEMOP_CALL(iemOp_3Dnow_pfadd_PQ_Qq);
        case 0xa0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpgt_Pq_Qq);
        case 0xa4: return FNIEMOP_CALL(iemOp_3Dnow_pfmax_Pq_Qq);
        case 0xa6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit1_Pq_Qq);
        case 0xa7: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqit1_Pq_Qq);
        case 0xaa: return FNIEMOP_CALL(iemOp_3Dnow_pfsubr_Pq_Qq);
        case 0xae: return FNIEMOP_CALL(iemOp_3Dnow_pfacc_PQ_Qq);
        case 0xb0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpeq_Pq_Qq);
        case 0xb4: return FNIEMOP_CALL(iemOp_3Dnow_pfmul_Pq_Qq);
        case 0xb6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit2_Pq_Qq);
        case 0xb7: return FNIEMOP_CALL(iemOp_3Dnow_pmulhrw_Pq_Qq);
        case 0xbb: return FNIEMOP_CALL(iemOp_3Dnow_pswapd_Pq_Qq);
        case 0xbf: return FNIEMOP_CALL(iemOp_3Dnow_pavgusb_PQ_Qq);
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
1503
1504
/** Opcode 0x0f 0x10. */
FNIEMOP_STUB(iemOp_movups_Vps_Wps__movupd_Vpd_Wpd__movss_Vss_Wss__movsd_Vsd_Wsd);


/** Opcode 0x0f 0x11.
 * Implementation intentionally disabled (see r61639 commit message):
 * something is causing regressions, so the stub is compiled instead. */
#if 0 /* something is causing regressions, disabling temporarily. */
FNIEMOP_DEF(iemOp_movups_Wps_Vps__movupd_Wpd_Vpd__movss_Wss_Vss__movsd_Vsd_Wsd)
{
    /* Quick hack. Need to restructure all of this later some time. */
    if (pIemCpu->fPrefixes == 0)
    {
        IEMOP_MNEMONIC("movups Wps,Vps");
        uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
        if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        {
            /*
             * Register, register.
             */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
            IEM_MC_BEGIN(0, 0);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
            IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB,
                                  ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            /*
             * Memory, register.
             */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ - yes it generally is! */
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

            IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
            IEM_MC_STORE_MEM_U128(pIemCpu->iEffSeg, GCPtrEffSrc, uSrc);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        return VINF_SUCCESS;
    }

    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}
#else
FNIEMOP_STUB(iemOp_movups_Wps_Vps__movupd_Wpd_Vpd__movss_Wss_Vss__movsd_Vsd_Wsd);
#endif
1561
1562
/** Opcode 0x0f 0x12. */
FNIEMOP_STUB(iemOp_movlps_Vq_Mq__movhlps_Vq_Uq__movlpd_Vq_Mq__movsldup_Vq_Wq__movddup_Vq_Wq); //NEXT


/** Opcode 0x0f 0x13.
 * Implementation intentionally disabled pending investigation of
 * regressions; the stub below is compiled instead. */
#if 0 /* something is causing regressions (probably not this one), disabling temporarily. */
FNIEMOP_DEF(iemOp_movlps_Mq_Vq__movlpd_Mq_Vq)
{
    /* Quick hack. Need to restructure all of this later some time. */
    if (pIemCpu->fPrefixes == IEM_OP_PRF_SIZE_OP)
    {
        IEMOP_MNEMONIC("movlpd Mq,Vq");
        uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
        if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        {
#if 0
            /*
             * Register, register.
             */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, uSrc);
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
            IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
            IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, uSrc);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
#else
            return IEMOP_RAISE_INVALID_OPCODE();
#endif
        }
        else
        {
            /*
             * Memory, register.
             */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ - yes it generally is! */
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

            IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
            IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffSrc, uSrc);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        return VINF_SUCCESS;
    }

    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}
#else
FNIEMOP_STUB(iemOp_movlps_Mq_Vq__movlpd_Mq_Vq);
#endif
1624
1625
/* SSE unpack/move-high group - all unimplemented stubs. */
/** Opcode 0x0f 0x14. */
FNIEMOP_STUB(iemOp_unpckhlps_Vps_Wq__unpcklpd_Vpd_Wq);
/** Opcode 0x0f 0x15. */
FNIEMOP_STUB(iemOp_unpckhps_Vps_Wq__unpckhpd_Vpd_Wq);
/** Opcode 0x0f 0x16. */
FNIEMOP_STUB(iemOp_movhps_Vq_Mq__movlhps_Vq_Uq__movhpd_Vq_Mq__movshdup_Vq_Wq); //NEXT
/** Opcode 0x0f 0x17. */
FNIEMOP_STUB(iemOp_movhps_Mq_Vq__movhpd_Mq_Vq); //NEXT
1634
1635
/** Opcode 0x0f 0x18. */
FNIEMOP_DEF(iemOp_prefetch_Grp16)
{
    /* Group 16 PREFETCHh hints: memory forms decode the address and do
       nothing; register forms raise \#UD. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX();
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 4: /* Aliased to /0 for the time being according to AMD. */
            case 5: /* Aliased to /0 for the time being according to AMD. */
            case 6: /* Aliased to /0 for the time being according to AMD. */
            case 7: /* Aliased to /0 for the time being according to AMD. */
            case 0: IEMOP_MNEMONIC("prefetchNTA m8"); break;
            case 1: IEMOP_MNEMONIC("prefetchT0  m8"); break;
            case 2: IEMOP_MNEMONIC("prefetchT1  m8"); break;
            case 3: IEMOP_MNEMONIC("prefetchT2  m8"); break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }

        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        /* Currently a NOP. */
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    return IEMOP_RAISE_INVALID_OPCODE();
}
1667
1668
1669/** Opcode 0x0f 0x19..0x1f. */
1670FNIEMOP_DEF(iemOp_nop_Ev)
1671{
1672 IEMOP_HLP_NO_LOCK_PREFIX();
1673 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1674 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1675 {
1676 IEM_MC_BEGIN(0, 0);
1677 IEM_MC_ADVANCE_RIP();
1678 IEM_MC_END();
1679 }
1680 else
1681 {
1682 IEM_MC_BEGIN(0, 1);
1683 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1684 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1685 /* Currently a NOP. */
1686 IEM_MC_ADVANCE_RIP();
1687 IEM_MC_END();
1688 }
1689 return VINF_SUCCESS;
1690}
1691
1692
/** Opcode 0x0f 0x20. */
FNIEMOP_DEF(iemOp_mov_Rd_Cd)
{
    /* MOV Rd,CRx - read a control register into a GPR. */
    /* mod is ignored, as is operand size overrides. */
    IEMOP_MNEMONIC("mov Rd,Cd");
    IEMOP_HLP_MIN_386();
    /* Operand size is forced: 64-bit in long mode, 32-bit otherwise. */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_64BIT;
    else
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg;
    if (pIemCpu->fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fMovCr8In32Bit)
            return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    /* Only CR0, CR2-CR4 and CR8 are valid sources. */
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_HLP_DONE_DECODING();

    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, (X86_MODRM_RM_MASK & bRm) | pIemCpu->uRexB, iCrReg);
}
1724
1725
/** Opcode 0x0f 0x21. */
FNIEMOP_DEF(iemOp_mov_Rd_Dd)
{
    /* MOV Rd,DRx - read a debug register into a GPR; REX.R is invalid
       (there are no DR8..DR15). */
    IEMOP_MNEMONIC("mov Rd,Dd");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REX_R)
        return IEMOP_RAISE_INVALID_OPCODE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
                                   (X86_MODRM_RM_MASK & bRm) | pIemCpu->uRexB,
                                   ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK));
}
1739
1740
/** Opcode 0x0f 0x22. */
FNIEMOP_DEF(iemOp_mov_Cd_Rd)
{
    /* MOV CRx,Rd - write a GPR to a control register. */
    /* mod is ignored, as is operand size overrides. */
    IEMOP_MNEMONIC("mov Cd,Rd");
    IEMOP_HLP_MIN_386();
    /* Operand size is forced: 64-bit in long mode, 32-bit otherwise. */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_64BIT;
    else
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg;
    if (pIemCpu->fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fMovCr8In32Bit)
            return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    /* Only CR0, CR2-CR4 and CR8 are valid destinations. */
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_HLP_DONE_DECODING();

    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, (X86_MODRM_RM_MASK & bRm) | pIemCpu->uRexB);
}
1772
1773
/** Opcode 0x0f 0x23. */
FNIEMOP_DEF(iemOp_mov_Dd_Rd)
{
    /* MOV DRx,Rd - write a GPR to a debug register; REX.R is invalid
       (there are no DR8..DR15). */
    IEMOP_MNEMONIC("mov Dd,Rd");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REX_R)
        return IEMOP_RAISE_INVALID_OPCODE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
                                   ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK),
                                   (X86_MODRM_RM_MASK & bRm) | pIemCpu->uRexB);
}
1787
1788
/** Opcode 0x0f 0x24. */
FNIEMOP_DEF(iemOp_mov_Rd_Td)
{
    /* MOV Rd,TRx - test registers only existed on 386/486; \#UD here. */
    IEMOP_MNEMONIC("mov Rd,Td");
    /** @todo works on 386 and 486. */
    /* The RM byte is not considered, see testcase. */
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x26. */
FNIEMOP_DEF(iemOp_mov_Td_Rd)
{
    /* MOV TRx,Rd - test registers only existed on 386/486; \#UD here. */
    IEMOP_MNEMONIC("mov Td,Rd");
    /** @todo works on 386 and 486. */
    /* The RM byte is not considered, see testcase. */
    return IEMOP_RAISE_INVALID_OPCODE();
}
1807
1808
/** Opcode 0x0f 0x28. */
FNIEMOP_DEF(iemOp_movaps_Vps_Wps__movapd_Vpd_Wpd)
{
    /* MOVAPS/MOVAPD xmm,xmm/m128 - the 0x66 prefix selects movapd (SSE2),
       otherwise movaps (SSE).  Memory operands are alignment-checked. */
    IEMOP_MNEMONIC(!(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP) ? "movaps r,mr" : "movapd r,mr");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        IEM_MC_BEGIN(0, 0);
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP))
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        else
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg,
                              (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ */
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP))
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        else
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1856
1857
/** Opcode 0x0f 0x29. */
FNIEMOP_DEF(iemOp_movaps_Wps_Vps__movapd_Wpd_Vpd)
{
    /* MOVAPS/MOVAPD xmm/m128,xmm - store direction of 0x0f 0x28; the 0x66
       prefix selects movapd (SSE2), otherwise movaps (SSE). */
    IEMOP_MNEMONIC(!(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP) ? "movaps mr,r" : "movapd mr,r");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        IEM_MC_BEGIN(0, 0);
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP))
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        else
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ */
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP))
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        else
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pIemCpu->iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1905
1906
/** Opcode 0x0f 0x2a. */
FNIEMOP_STUB(iemOp_cvtpi2ps_Vps_Qpi__cvtpi2pd_Vpd_Qpi__cvtsi2ss_Vss_Ey__cvtsi2sd_Vsd_Ey); //NEXT
1909
1910
/** Opcode 0x0f 0x2b. */
#if 1 //ndef VBOX_WITH_REM /** @todo figure out why some/all of these instructions is upsetting things */
FNIEMOP_DEF(iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd)
{
    /* MOVNTPS/MOVNTPD m128,xmm - non-temporal 128-bit store (emulated as a
       plain aligned store here).  Only memory destinations are valid. */
    IEMOP_MNEMONIC(!(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP) ? "movntps mr,r" : "movntpd mr,r");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ */
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP))
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        else
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pIemCpu->iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    /* The register, register encoding is invalid. */
    else
        return IEMOP_RAISE_INVALID_OPCODE();
    return VINF_SUCCESS;
}
#else
FNIEMOP_STUB(iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd);
#endif
1948
1949
/* SSE convert/compare group - all unimplemented stubs. */
/** Opcode 0x0f 0x2c. */
FNIEMOP_STUB(iemOp_cvttps2pi_Ppi_Wps__cvttpd2pi_Ppi_Wpd__cvttss2si_Gy_Wss__cvttsd2si_Yu_Wsd); //NEXT
/** Opcode 0x0f 0x2d. */
FNIEMOP_STUB(iemOp_cvtps2pi_Ppi_Wps__cvtpd2pi_QpiWpd__cvtss2si_Gy_Wss__cvtsd2si_Gy_Wsd);
/** Opcode 0x0f 0x2e. */
FNIEMOP_STUB(iemOp_ucomiss_Vss_Wss__ucomisd_Vsd_Wsd); //NEXT
/** Opcode 0x0f 0x2f. */
FNIEMOP_STUB(iemOp_comiss_Vss_Wss__comisd_Vsd_Wsd);
1958
1959
/** Opcode 0x0f 0x30. */
FNIEMOP_DEF(iemOp_wrmsr)
{
    /* WRMSR - fully handled by the C implementation. */
    IEMOP_MNEMONIC("wrmsr");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
}


/** Opcode 0x0f 0x31. */
FNIEMOP_DEF(iemOp_rdtsc)
{
    /* RDTSC - fully handled by the C implementation. */
    IEMOP_MNEMONIC("rdtsc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
}


/** Opcode 0x0f 0x32. */
FNIEMOP_DEF(iemOp_rdmsr)
{
    /* RDMSR - fully handled by the C implementation. */
    IEMOP_MNEMONIC("rdmsr");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
}
1985
1986
/** Opcode 0x0f 0x33. */
FNIEMOP_STUB(iemOp_rdpmc);
/** Opcode 0x0f 0x34. */
FNIEMOP_STUB(iemOp_sysenter);
/** Opcode 0x0f 0x35. */
FNIEMOP_STUB(iemOp_sysexit);
/** Opcode 0x0f 0x37. */
FNIEMOP_STUB(iemOp_getsec);
/** Opcode 0x0f 0x38. */
FNIEMOP_UD_STUB(iemOp_3byte_Esc_A4); /* Here there be dragons... */
/** Opcode 0x0f 0x3a. */
FNIEMOP_UD_STUB(iemOp_3byte_Esc_A5); /* Here there be dragons... */
1999
2000
/**
 * Implements a conditional move.
 *
 * Wish there was an obvious way to do this where we could share and reduce
 * code bloat.
 *
 * Register form: the r/m GPR is fetched and stored into the ModRM reg GPR
 * only when the condition holds.  Memory form: the memory operand is always
 * read (so it can fault) before the condition is evaluated, like on real
 * hardware.  In the 32-bit operand-size case the high half of the 64-bit
 * destination register is cleared even when the condition is false (the
 * usual 64-bit mode zero-extension of 32-bit results).
 *
 * @param   a_Cnd       The conditional "microcode" operation.
 */
#define CMOV_X(a_Cnd) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) \
    { \
        switch (pIemCpu->enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(0, 1); \
                IEM_MC_LOCAL(uint16_t, u16Tmp); \
                a_Cnd { \
                    IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB); \
                    IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(0, 1); \
                IEM_MC_LOCAL(uint32_t, u32Tmp); \
                a_Cnd { \
                    IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB); \
                    IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp); \
                } IEM_MC_ELSE() { \
                    IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(0, 1); \
                IEM_MC_LOCAL(uint64_t, u64Tmp); \
                a_Cnd { \
                    IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB); \
                    IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        switch (pIemCpu->enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(0, 2); \
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffSrc); \
                IEM_MC_LOCAL(uint16_t, u16Tmp); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
                IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrEffSrc); \
                a_Cnd { \
                    IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(0, 2); \
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffSrc); \
                IEM_MC_LOCAL(uint32_t, u32Tmp); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
                IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffSrc); \
                a_Cnd { \
                    IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp); \
                } IEM_MC_ELSE() { \
                    IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(0, 2); \
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffSrc); \
                IEM_MC_LOCAL(uint64_t, u64Tmp); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
                IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffSrc); \
                a_Cnd { \
                    IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } do {} while (0)
2101
2102
2103
/** Opcode 0x0f 0x40 - cmovo: move if OF=1. */
FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovo Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
}


/** Opcode 0x0f 0x41 - cmovno: move if OF=0. */
FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovno Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
}


/** Opcode 0x0f 0x42 - cmovc/cmovb: move if CF=1. */
FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovc Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
}


/** Opcode 0x0f 0x43 - cmovnc/cmovae: move if CF=0. */
FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovnc Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
}


/** Opcode 0x0f 0x44 - cmove/cmovz: move if ZF=1. */
FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
{
    IEMOP_MNEMONIC("cmove Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
}


/** Opcode 0x0f 0x45 - cmovne/cmovnz: move if ZF=0. */
FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovne Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
}


/** Opcode 0x0f 0x46 - cmovbe/cmovna: move if CF=1 or ZF=1. */
FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovbe Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
}


/** Opcode 0x0f 0x47 - cmovnbe/cmova: move if CF=0 and ZF=0. */
FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovnbe Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
}


/** Opcode 0x0f 0x48 - cmovs: move if SF=1. */
FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovs Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
}


/** Opcode 0x0f 0x49 - cmovns: move if SF=0. */
FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovns Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
}


/** Opcode 0x0f 0x4a - cmovp/cmovpe: move if PF=1. */
FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovp Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
}


/** Opcode 0x0f 0x4b - cmovnp/cmovpo: move if PF=0. */
FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovnp Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
}


/** Opcode 0x0f 0x4c - cmovl/cmovnge: move if SF!=OF. */
FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovl Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
}


/** Opcode 0x0f 0x4d - cmovnl/cmovge: move if SF==OF. */
FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovnl Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
}


/** Opcode 0x0f 0x4e - cmovle/cmovng: move if ZF=1 or SF!=OF. */
FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovle Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
}


/** Opcode 0x0f 0x4f - cmovnle/cmovg: move if ZF=0 and SF==OF. */
FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovnle Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
}

#undef CMOV_X
2232
/* Decode stubs for the SSE/SSE2 packed/scalar floating-point opcodes 0x50..0x5f. */
/** Opcode 0x0f 0x50. */
FNIEMOP_STUB(iemOp_movmskps_Gy_Ups__movmskpd_Gy_Upd);
/** Opcode 0x0f 0x51. */
FNIEMOP_STUB(iemOp_sqrtps_Wps_Vps__sqrtpd_Wpd_Vpd__sqrtss_Vss_Wss__sqrtsd_Vsd_Wsd);
/** Opcode 0x0f 0x52. */
FNIEMOP_STUB(iemOp_rsqrtps_Wps_Vps__rsqrtss_Vss_Wss);
/** Opcode 0x0f 0x53. */
FNIEMOP_STUB(iemOp_rcpps_Wps_Vps__rcpss_Vs_Wss);
/** Opcode 0x0f 0x54. */
FNIEMOP_STUB(iemOp_andps_Vps_Wps__andpd_Wpd_Vpd);
/** Opcode 0x0f 0x55. */
FNIEMOP_STUB(iemOp_andnps_Vps_Wps__andnpd_Wpd_Vpd);
/** Opcode 0x0f 0x56. */
FNIEMOP_STUB(iemOp_orps_Wpd_Vpd__orpd_Wpd_Vpd);
/** Opcode 0x0f 0x57. */
FNIEMOP_STUB(iemOp_xorps_Vps_Wps__xorpd_Wpd_Vpd);
/** Opcode 0x0f 0x58. */
FNIEMOP_STUB(iemOp_addps_Vps_Wps__addpd_Vpd_Wpd__addss_Vss_Wss__addsd_Vsd_Wsd); //NEXT
/** Opcode 0x0f 0x59. */
FNIEMOP_STUB(iemOp_mulps_Vps_Wps__mulpd_Vpd_Wpd__mulss_Vss__Wss__mulsd_Vsd_Wsd);//NEXT
/** Opcode 0x0f 0x5a. */
FNIEMOP_STUB(iemOp_cvtps2pd_Vpd_Wps__cvtpd2ps_Vps_Wpd__cvtss2sd_Vsd_Wss__cvtsd2ss_Vss_Wsd);
/** Opcode 0x0f 0x5b. */
FNIEMOP_STUB(iemOp_cvtdq2ps_Vps_Wdq__cvtps2dq_Vdq_Wps__cvtps2dq_Vdq_Wps);
/** Opcode 0x0f 0x5c. */
FNIEMOP_STUB(iemOp_subps_Vps_Wps__subpd_Vps_Wdp__subss_Vss_Wss__subsd_Vsd_Wsd);
/** Opcode 0x0f 0x5d. */
FNIEMOP_STUB(iemOp_minps_Vps_Wps__minpd_Vpd_Wpd__minss_Vss_Wss__minsd_Vsd_Wsd);
/** Opcode 0x0f 0x5e. */
FNIEMOP_STUB(iemOp_divps_Vps_Wps__divpd_Vpd_Wpd__divss_Vss_Wss__divsd_Vsd_Wsd);
/** Opcode 0x0f 0x5f. */
FNIEMOP_STUB(iemOp_maxps_Vps_Wps__maxpd_Vpd_Wpd__maxss_Vss_Wss__maxsd_Vsd_Wsd);
2265
2266
/**
 * Common worker for SSE2 and MMX instructions on the forms:
 *      pxxxx xmm1, xmm2/mem128
 *      pxxxx mm1, mm2/mem32
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 32-bit memory access for MMX and 128-bit aligned 64-bit or 128-bit
 * memory accessed for SSE.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonMmxSse_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* The operand-size/rep prefixes select between the SSE and MMX forms. */
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE */
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(2, 0);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_ARG(uint64_t const *, pSrc, 1);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_PREPARE_SSE_USAGE();
                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_XREG_U64_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_LOCAL(uint64_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_FETCH_MEM_U64_ALIGN_U128(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);

                IEM_MC_PREPARE_SSE_USAGE();
                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            /* Instructions without a 64-bit MMX variant raise #UD here. */
            if (!pImpl->pfnU64)
                return IEMOP_RAISE_INVALID_OPCODE();
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
                /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(2, 0);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_ARG(uint32_t const *, pSrc, 1);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_PREPARE_FPU_USAGE();
                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_REF_MREG_U32_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
                IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_LOCAL(uint32_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint32_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_FETCH_MEM_U32(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);

                IEM_MC_PREPARE_FPU_USAGE();
                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
2377
2378
/** Opcode 0x0f 0x60 - interleave low-order bytes. */
FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd__punpcklbw_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpcklbw");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_LowLow_To_Full, &g_iemAImpl_punpcklbw);
}


/** Opcode 0x0f 0x61 - interleave low-order words. */
FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd__punpcklwd_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpcklwd"); /** @todo AMD mark the MMX version as 3DNow!. Intel says MMX CPUID req. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_LowLow_To_Full, &g_iemAImpl_punpcklwd);
}


/** Opcode 0x0f 0x62 - interleave low-order dwords. */
FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd__punpckldq_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpckldq");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_LowLow_To_Full, &g_iemAImpl_punpckldq);
}
2401
2402
/* Decode stubs for the pack/compare MMX-SSE2 opcodes 0x63..0x67. */
/** Opcode 0x0f 0x63. */
FNIEMOP_STUB(iemOp_packsswb_Pq_Qq__packsswb_Vdq_Wdq);
/** Opcode 0x0f 0x64. */
FNIEMOP_STUB(iemOp_pcmpgtb_Pq_Qq__pcmpgtb_Vdq_Wdq);
/** Opcode 0x0f 0x65. */
FNIEMOP_STUB(iemOp_pcmpgtw_Pq_Qq__pcmpgtw_Vdq_Wdq);
/** Opcode 0x0f 0x66. */
FNIEMOP_STUB(iemOp_pcmpgtd_Pq_Qq__pcmpgtd_Vdq_Wdq);
/** Opcode 0x0f 0x67. */
FNIEMOP_STUB(iemOp_packuswb_Pq_Qq__packuswb_Vdq_Wdq);
2413
2414
/**
 * Common worker for SSE2 and MMX instructions on the forms:
 *      pxxxx xmm1, xmm2/mem128
 *      pxxxx mm1, mm2/mem64
 *
 * The 2nd operand is the second half of a register, which in the memory case
 * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
 * where it may read the full 128 bits or only the upper 64 bits.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonMmxSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* The operand-size/rep prefixes select between the SSE and MMX forms. */
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE */
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(2, 0);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_ARG(uint128_t const *, pSrc, 1);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_PREPARE_SSE_USAGE();
                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_LOCAL(uint128_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc); /* Most CPUs probably only read the high qword. */

                IEM_MC_PREPARE_SSE_USAGE();
                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            /* Instructions without a 64-bit MMX variant raise #UD here. */
            if (!pImpl->pfnU64)
                return IEMOP_RAISE_INVALID_OPCODE();
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
                /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(2, 0);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_ARG(uint64_t const *, pSrc, 1);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_PREPARE_FPU_USAGE();
                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
                IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_LOCAL(uint64_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_FETCH_MEM_U64(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);

                IEM_MC_PREPARE_FPU_USAGE();
                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
2525
2526
/** Opcode 0x0f 0x68 - interleave high-order bytes. */
FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qq__punpckhbw_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpckhbw");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
}


/** Opcode 0x0f 0x69 - interleave high-order words. */
FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qd__punpckhwd_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpckhwd");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
}


/** Opcode 0x0f 0x6a - interleave high-order dwords. */
FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qd__punpckhdq_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpckhdq");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
}

/** Opcode 0x0f 0x6b. */
FNIEMOP_STUB(iemOp_packssdw_Pq_Qd__packssdq_Vdq_Wdq);


/** Opcode 0x0f 0x6c - interleave low qwords (SSE2 only; presumably the
 *  impl table has no MMX variant so the worker raises \#UD without the
 *  0x66 prefix — verify g_iemAImpl_punpcklqdq). */
FNIEMOP_DEF(iemOp_punpcklqdq_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpcklqdq");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_LowLow_To_Full, &g_iemAImpl_punpcklqdq);
}


/** Opcode 0x0f 0x6d - interleave high qwords (SSE2 only, see 0x6c note). */
FNIEMOP_DEF(iemOp_punpckhqdq_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpckhqdq");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhqdq);
}
2568
2569
/** Opcode 0x0f 0x6e - movd/movq from a GPR or memory into an XMM (0x66
 *  prefix) or MMX (no prefix) register; REX.W selects the 64-bit (movq)
 *  form, otherwise 32 bits are moved and the destination is zero-extended. */
FNIEMOP_DEF(iemOp_movd_q_Pd_Ey__movd_q_Vy_Ey)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE */
            IEMOP_MNEMONIC("movd/q Wd/q,Ed/q");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* XMM, greg*/
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                    IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                    IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* XMM, [mem] */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); /** @todo order */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                    IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                    IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            IEMOP_MNEMONIC("movq/d Pd/q,Ed/q");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* MMX, greg */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                    IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                else
                    IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* MMX, [mem] */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                    IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                    IEM_MC_STORE_MREG_U32_ZX_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
2674
2675
/** Opcode 0x0f 0x6f - movq (MMX), movdqa (0x66 prefix, alignment-checked) or
 *  movdqu (0xf3 prefix, unaligned) load. */
FNIEMOP_DEF(iemOp_movq_Pq_Qq__movdqa_Vdq_Wdq__movdqu_Vdq_Wdq)
{
    bool fAligned = false;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE aligned */
            fAligned = true;
            /* fall thru - the aligned and unaligned forms only differ in the memory fetch. */
        case IEM_OP_PRF_REPZ: /* SSE unaligned */
            if (fAligned)
                IEMOP_MNEMONIC("movdqa Vdq,Wdq");
            else
                IEMOP_MNEMONIC("movdqu Vdq,Wdq");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 0);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
                IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg,
                                      (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint128_t, u128Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
                if (fAligned)
                    IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                else
                    IEM_MC_FETCH_MEM_U128(u128Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u128Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            IEMOP_MNEMONIC("movq Pq,Qq");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
                /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
                IEM_MC_FETCH_MREG_U64(u64Tmp, bRm & X86_MODRM_RM_MASK);
                IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
2772
2773
/** Opcode 0x0f 0x70. The immediate here is evil!
 *  (The imm8 control byte follows the ModRM displacement, so in the memory
 *  forms it can only be fetched after the effective address calculation.) */
FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib__pshufd_Vdq_Wdq_Ib__pshufhw_Vdq_Wdq_Ib__pshuflq_Vdq_Wdq_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE */
        case IEM_OP_PRF_REPNZ: /* SSE */
        case IEM_OP_PRF_REPZ: /* SSE */
        {
            /* The prefix selects which 128-bit shuffle variant is used. */
            PFNIEMAIMPLMEDIAPSHUF pfnAImpl;
            switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
            {
                case IEM_OP_PRF_SIZE_OP:
                    IEMOP_MNEMONIC("pshufd Vdq,Wdq,Ib");
                    pfnAImpl = iemAImpl_pshufd;
                    break;
                case IEM_OP_PRF_REPNZ:
                    IEMOP_MNEMONIC("pshuflw Vdq,Wdq,Ib");
                    pfnAImpl = iemAImpl_pshuflw;
                    break;
                case IEM_OP_PRF_REPZ:
                    IEMOP_MNEMONIC("pshufhw Vdq,Wdq,Ib");
                    pfnAImpl = iemAImpl_pshufhw;
                    break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_ARG(uint128_t const *, pSrc, 1);
                IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_PREPARE_SSE_USAGE();
                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_CALL_SSE_AIMPL_3(pfnAImpl, pDst, pSrc, bEvilArg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_LOCAL(uint128_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil); /* the imm8 follows the displacement */
                IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();

                IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_PREPARE_SSE_USAGE();
                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_SSE_AIMPL_3(pfnAImpl, pDst, pSrc, bEvilArg);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }

        case 0: /* MMX Extension */
            IEMOP_MNEMONIC("pshufw Pq,Qq,Ib");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_ARG(uint64_t const *, pSrc, 1);
                IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
                IEM_MC_PREPARE_FPU_USAGE();
                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
                IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_LOCAL(uint64_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil); /* the imm8 follows the displacement */
                IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();

                IEM_MC_FETCH_MEM_U64(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_PREPARE_FPU_USAGE();
                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
2902
2903
/* Group 12 (0x0f 0x71) word-shift-by-immediate stubs; MMX (Nq) and SSE (Udq) forms. */
/** Opcode 0x0f 0x71 11/2. */
FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x71 11/2. */
FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Udq_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x71 11/4. */
FNIEMOP_STUB_1(iemOp_Grp12_psraw_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x71 11/4. */
FNIEMOP_STUB_1(iemOp_Grp12_psraw_Udq_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x71 11/6. */
FNIEMOP_STUB_1(iemOp_Grp12_psllw_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x71 11/6. */
FNIEMOP_STUB_1(iemOp_Grp12_psllw_Udq_Ib, uint8_t, bRm);
2921
2922
/** Opcode 0x0f 0x71.
 *  Group 12: word shifts by immediate, dispatched on the ModRM reg field
 *  (/2 psrlw, /4 psraw, /6 psllw); only the register (mod=3) form is valid. */
FNIEMOP_DEF(iemOp_Grp12)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE(); /* no memory forms in this group */
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: case 1: case 3: case 5: case 7:
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2:
            switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case 0:                  return FNIEMOP_CALL_1(iemOp_Grp12_psrlw_Nq_Ib, bRm);
                case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psrlw_Udq_Ib, bRm);
                default:                 return IEMOP_RAISE_INVALID_OPCODE();
            }
        case 4:
            switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case 0:                  return FNIEMOP_CALL_1(iemOp_Grp12_psraw_Nq_Ib, bRm);
                case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psraw_Udq_Ib, bRm);
                default:                 return IEMOP_RAISE_INVALID_OPCODE();
            }
        case 6:
            switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case 0:                  return FNIEMOP_CALL_1(iemOp_Grp12_psllw_Nq_Ib, bRm);
                case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psllw_Udq_Ib, bRm);
                default:                 return IEMOP_RAISE_INVALID_OPCODE();
            }
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2957
2958
/* Group 13 (0x0f 0x72) dword-shift-by-immediate stubs; MMX (Nq) and SSE (Udq) forms. */
/** Opcode 0x0f 0x72 11/2. */
FNIEMOP_STUB_1(iemOp_Grp13_psrld_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x72 11/2. */
FNIEMOP_STUB_1(iemOp_Grp13_psrld_Udq_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x72 11/4. */
FNIEMOP_STUB_1(iemOp_Grp13_psrad_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x72 11/4. */
FNIEMOP_STUB_1(iemOp_Grp13_psrad_Udq_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x72 11/6. */
FNIEMOP_STUB_1(iemOp_Grp13_pslld_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x72 11/6. */
FNIEMOP_STUB_1(iemOp_Grp13_pslld_Udq_Ib, uint8_t, bRm);
2976
2977
/** Opcode 0x0f 0x72.
 *  Group 13: dword shifts by immediate, dispatched on the ModRM reg field
 *  (/2 psrld, /4 psrad, /6 pslld); only the register (mod=3) form is valid. */
FNIEMOP_DEF(iemOp_Grp13)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE(); /* no memory forms in this group */
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: case 1: case 3: case 5: case 7:
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2:
            switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case 0:                  return FNIEMOP_CALL_1(iemOp_Grp13_psrld_Nq_Ib, bRm);
                case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_psrld_Udq_Ib, bRm);
                default:                 return IEMOP_RAISE_INVALID_OPCODE();
            }
        case 4:
            switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case 0:                  return FNIEMOP_CALL_1(iemOp_Grp13_psrad_Nq_Ib, bRm);
                case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_psrad_Udq_Ib, bRm);
                default:                 return IEMOP_RAISE_INVALID_OPCODE();
            }
        case 6:
            switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case 0:                  return FNIEMOP_CALL_1(iemOp_Grp13_pslld_Nq_Ib, bRm);
                case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_pslld_Udq_Ib, bRm);
                default:                 return IEMOP_RAISE_INVALID_OPCODE();
            }
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
3012
3013
/* Decoder stubs for group 14 (0x0f 0x73) workers — not yet implemented.
   The psrldq/pslldq byte-shift forms exist only with the 0x66 prefix. */

/** Opcode 0x0f 0x73 11/2. */
FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x73 11/2. */
FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Udq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x73 11/3. */
FNIEMOP_STUB_1(iemOp_Grp14_psrldq_Udq_Ib, uint8_t, bRm); //NEXT

/** Opcode 0x0f 0x73 11/6. */
FNIEMOP_STUB_1(iemOp_Grp14_psllq_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x73 11/6. */
FNIEMOP_STUB_1(iemOp_Grp14_psllq_Udq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x73 11/7. */
FNIEMOP_STUB_1(iemOp_Grp14_pslldq_Udq_Ib, uint8_t, bRm); //NEXT
3031
3032
/** Opcode 0x0f 0x73.
 * Group 14 dispatcher: qword shift-by-immediate ops (psrlq /2, psllq /6) and
 * the SSE2-only whole-register byte shifts (psrldq /3, pslldq /7, which
 * require the 0x66 prefix).  Only the register form (mod == 3) is valid. */
FNIEMOP_DEF(iemOp_Grp14)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE();
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: case 1: case 4: case 5:
            /* Undefined /reg values in this group. */
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2: /* psrlq */
            switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case 0: return FNIEMOP_CALL_1(iemOp_Grp14_psrlq_Nq_Ib, bRm);
                case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psrlq_Udq_Ib, bRm);
                default: return IEMOP_RAISE_INVALID_OPCODE();
            }
        case 3: /* psrldq - SSE2 only, no MMX form. */
            switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psrldq_Udq_Ib, bRm);
                default: return IEMOP_RAISE_INVALID_OPCODE();
            }
        case 6: /* psllq */
            switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case 0: return FNIEMOP_CALL_1(iemOp_Grp14_psllq_Nq_Ib, bRm);
                case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psllq_Udq_Ib, bRm);
                default: return IEMOP_RAISE_INVALID_OPCODE();
            }
        case 7: /* pslldq - SSE2 only, no MMX form. */
            switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_pslldq_Udq_Ib, bRm);
                default: return IEMOP_RAISE_INVALID_OPCODE();
            }
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
3072
3073
3074/**
3075 * Common worker for SSE2 and MMX instructions on the forms:
3076 * pxxx mm1, mm2/mem64
3077 * pxxx xmm1, xmm2/mem128
3078 *
3079 * Proper alignment of the 128-bit operand is enforced.
3080 * Exceptions type 4. SSE2 and MMX cpuid checks.
3081 */
3082FNIEMOP_DEF_1(iemOpCommonMmxSse2_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
3083{
3084 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3085 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
3086 {
3087 case IEM_OP_PRF_SIZE_OP: /* SSE */
3088 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3089 {
3090 /*
3091 * Register, register.
3092 */
3093 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3094 IEM_MC_BEGIN(2, 0);
3095 IEM_MC_ARG(uint128_t *, pDst, 0);
3096 IEM_MC_ARG(uint128_t const *, pSrc, 1);
3097 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3098 IEM_MC_PREPARE_SSE_USAGE();
3099 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
3100 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
3101 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3102 IEM_MC_ADVANCE_RIP();
3103 IEM_MC_END();
3104 }
3105 else
3106 {
3107 /*
3108 * Register, memory.
3109 */
3110 IEM_MC_BEGIN(2, 2);
3111 IEM_MC_ARG(uint128_t *, pDst, 0);
3112 IEM_MC_LOCAL(uint128_t, uSrc);
3113 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
3114 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3115
3116 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3117 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3118 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3119 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);
3120
3121 IEM_MC_PREPARE_SSE_USAGE();
3122 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
3123 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3124
3125 IEM_MC_ADVANCE_RIP();
3126 IEM_MC_END();
3127 }
3128 return VINF_SUCCESS;
3129
3130 case 0: /* MMX */
3131 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3132 {
3133 /*
3134 * Register, register.
3135 */
3136 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3137 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3138 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3139 IEM_MC_BEGIN(2, 0);
3140 IEM_MC_ARG(uint64_t *, pDst, 0);
3141 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3142 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3143 IEM_MC_PREPARE_FPU_USAGE();
3144 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3145 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3146 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3147 IEM_MC_ADVANCE_RIP();
3148 IEM_MC_END();
3149 }
3150 else
3151 {
3152 /*
3153 * Register, memory.
3154 */
3155 IEM_MC_BEGIN(2, 2);
3156 IEM_MC_ARG(uint64_t *, pDst, 0);
3157 IEM_MC_LOCAL(uint64_t, uSrc);
3158 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3159 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3160
3161 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3162 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3163 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3164 IEM_MC_FETCH_MEM_U64(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);
3165
3166 IEM_MC_PREPARE_FPU_USAGE();
3167 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3168 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3169
3170 IEM_MC_ADVANCE_RIP();
3171 IEM_MC_END();
3172 }
3173 return VINF_SUCCESS;
3174
3175 default:
3176 return IEMOP_RAISE_INVALID_OPCODE();
3177 }
3178}
3179
3180
/** Opcode 0x0f 0x74.
 * PCMPEQB - packed byte compare-for-equal; MMX and SSE2 forms handled by the
 * common full/full worker. */
FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq__pcmpeqb_Vdq_Wdq)
{
    IEMOP_MNEMONIC("pcmpeqb");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
}
3187
3188
/** Opcode 0x0f 0x75.
 * PCMPEQW - packed word compare-for-equal; MMX and SSE2 forms handled by the
 * common full/full worker. */
FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq__pcmpeqw_Vdq_Wdq)
{
    IEMOP_MNEMONIC("pcmpeqw");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
}
3195
3196
/** Opcode 0x0f 0x76.
 * PCMPEQD - packed dword compare-for-equal; MMX and SSE2 forms handled by the
 * common full/full worker.
 * @note The function name has a typo ("pcmped" for "pcmpeqd"); it is
 *       referenced from the opcode table, so renaming must be coordinated. */
FNIEMOP_DEF(iemOp_pcmped_Pq_Qq__pcmpeqd_Vdq_Wdq)
{
    IEMOP_MNEMONIC("pcmpeqd");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
}
3203
3204
/* Decoder stubs — FNIEMOP_STUB = not yet implemented; FNIEMOP_UD_STUB
   presumably decodes to invalid opcode (#UD) — see the macro definitions. */

/** Opcode 0x0f 0x77. */
FNIEMOP_STUB(iemOp_emms);
/** Opcode 0x0f 0x78. */
FNIEMOP_UD_STUB(iemOp_vmread_AmdGrp17);
/** Opcode 0x0f 0x79. */
FNIEMOP_UD_STUB(iemOp_vmwrite);
/** Opcode 0x0f 0x7c. */
FNIEMOP_STUB(iemOp_haddpd_Vdp_Wpd__haddps_Vps_Wps);
/** Opcode 0x0f 0x7d. */
FNIEMOP_STUB(iemOp_hsubpd_Vpd_Wpd__hsubps_Vps_Wps);
3215
3216
/** Opcode 0x0f 0x7e.
 * MOVD/MOVQ from MMX/XMM register to general register or memory.  The 0x66
 * prefix selects the XMM source, no prefix the MMX source; REX.W selects a
 * 64-bit (movq) vs 32-bit (movd) transfer.  REPZ/REPNZ are invalid here
 * (the F3-prefixed movq Vq,Wq form is not handled by this function). */
FNIEMOP_DEF(iemOp_movd_q_Ey_Pd__movd_q_Ey_Vy__movq_Vq_Wq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE */
            IEMOP_MNEMONIC("movd/q Ed/q,Wd/q");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* greg, XMM */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    /* REX.W: 64-bit movq to a general register. */
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                    IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Tmp);
                }
                else
                {
                    /* No REX.W: 32-bit movd to a general register. */
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                    IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* [mem], XMM */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                    IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffSrc, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                    IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrEffSrc, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            IEMOP_MNEMONIC("movq/d Ed/q,Pd/q");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* greg, MMX */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    /* REX.W: 64-bit transfer. */
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                    IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Tmp);
                }
                else
                {
                    /* No REX.W: low 32 bits only. */
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                    IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* [mem], MMX */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                    IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffSrc, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                    IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrEffSrc, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
3327
3328
/** Opcode 0x0f 0x7f.
 * MOVQ Qq,Pq (MMX, no prefix), MOVDQA Wdq,Vdq (0x66 prefix, aligned store)
 * and MOVDQU Wdq,Vdq (F3 prefix, unaligned store).  Register-to-register and
 * register-to-memory store forms. */
FNIEMOP_DEF(iemOp_movq_Qq_Pq__movq_movdqa_Wdq_Vdq__movdqu_Wdq_Vdq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    bool fAligned = false;
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE aligned */
            fAligned = true;
            /* fall thru - shares the code below, differing only in fAligned. */
        case IEM_OP_PRF_REPZ: /* SSE unaligned */
            if (fAligned)
                IEMOP_MNEMONIC("movdqa Wdq,Vdq");
            else
                IEMOP_MNEMONIC("movdqu Wdq,Vdq");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 0);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
                IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB,
                                      ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint128_t, u128Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

                IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                if (fAligned)
                    /* movdqa: store enforces 16-byte alignment. */
                    IEM_MC_STORE_MEM_U128_ALIGN_SSE(pIemCpu->iEffSeg, GCPtrEffSrc, u128Tmp);
                else
                    /* movdqu: no alignment requirement. */
                    IEM_MC_STORE_MEM_U128(pIemCpu->iEffSeg, GCPtrEffSrc, u128Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            IEMOP_MNEMONIC("movq Qq,Pq");

            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
                /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
                IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_STORE_MREG_U64(bRm & X86_MODRM_RM_MASK, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();

                IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffSrc, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
3428
3429
3430
/** Opcode 0x0f 0x80.
 * JO rel16/rel32 - jump when the overflow flag (OF) is set. */
FNIEMOP_DEF(iemOp_jo_Jv)
{
    IEMOP_MNEMONIC("jo Jv");
    IEMOP_HLP_MIN_386();                    /* Jcc with 16/32-bit displacement: 386+. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3465
3466
/** Opcode 0x0f 0x81.
 * JNO rel16/rel32 - jump when the overflow flag (OF) is clear. */
FNIEMOP_DEF(iemOp_jno_Jv)
{
    IEMOP_MNEMONIC("jno Jv");
    IEMOP_HLP_MIN_386();                    /* Jcc with 16/32-bit displacement: 386+. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {    /* Inverted condition: OF set => not taken. */
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3501
3502
/** Opcode 0x0f 0x82.
 * JC/JB/JNAE rel16/rel32 - jump when the carry flag (CF) is set. */
FNIEMOP_DEF(iemOp_jc_Jv)
{
    IEMOP_MNEMONIC("jc/jb/jnae Jv");
    IEMOP_HLP_MIN_386();                    /* Jcc with 16/32-bit displacement: 386+. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3537
3538
/** Opcode 0x0f 0x83.
 * JNC/JNB/JAE rel16/rel32 - jump when the carry flag (CF) is clear. */
FNIEMOP_DEF(iemOp_jnc_Jv)
{
    IEMOP_MNEMONIC("jnc/jnb/jae Jv");
    IEMOP_HLP_MIN_386();                    /* Jcc with 16/32-bit displacement: 386+. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {    /* Inverted condition: CF set => not taken. */
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3573
3574
/** Opcode 0x0f 0x84.
 * JE/JZ rel16/rel32 - jump when the zero flag (ZF) is set. */
FNIEMOP_DEF(iemOp_je_Jv)
{
    IEMOP_MNEMONIC("je/jz Jv");
    IEMOP_HLP_MIN_386();                    /* Jcc with 16/32-bit displacement: 386+. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3609
3610
/** Opcode 0x0f 0x85.
 * JNE/JNZ rel16/rel32 - jump when the zero flag (ZF) is clear. */
FNIEMOP_DEF(iemOp_jne_Jv)
{
    IEMOP_MNEMONIC("jne/jnz Jv");
    IEMOP_HLP_MIN_386();                    /* Jcc with 16/32-bit displacement: 386+. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {    /* Inverted condition: ZF set => not taken. */
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3645
3646
/** Opcode 0x0f 0x86.
 * JBE/JNA rel16/rel32 - jump when CF or ZF is set. */
FNIEMOP_DEF(iemOp_jbe_Jv)
{
    IEMOP_MNEMONIC("jbe/jna Jv");
    IEMOP_HLP_MIN_386();                    /* Jcc with 16/32-bit displacement: 386+. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3681
3682
/** Opcode 0x0f 0x87.
 * JNBE/JA rel16/rel32 - jump when both CF and ZF are clear. */
FNIEMOP_DEF(iemOp_jnbe_Jv)
{
    IEMOP_MNEMONIC("jnbe/ja Jv");
    IEMOP_HLP_MIN_386();                    /* Jcc with 16/32-bit displacement: 386+. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {  /* Inverted: any set => not taken. */
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3717
3718
/** Opcode 0x0f 0x88.
 * JS rel16/rel32 - jump when the sign flag (SF) is set. */
FNIEMOP_DEF(iemOp_js_Jv)
{
    IEMOP_MNEMONIC("js Jv");
    IEMOP_HLP_MIN_386();                    /* Jcc with 16/32-bit displacement: 386+. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3753
3754
/** Opcode 0x0f 0x89.
 * JNS rel16/rel32 - jump when the sign flag (SF) is clear. */
FNIEMOP_DEF(iemOp_jns_Jv)
{
    IEMOP_MNEMONIC("jns Jv");
    IEMOP_HLP_MIN_386();                    /* Jcc with 16/32-bit displacement: 386+. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {    /* Inverted condition: SF set => not taken. */
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3789
3790
/** Opcode 0x0f 0x8a.
 * JP rel16/rel32 - jump when the parity flag (PF) is set. */
FNIEMOP_DEF(iemOp_jp_Jv)
{
    IEMOP_MNEMONIC("jp Jv");
    IEMOP_HLP_MIN_386();                    /* Jcc with 16/32-bit displacement: 386+. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3825
3826
3827/** Opcode 0x0f 0x8b. */
3828FNIEMOP_DEF(iemOp_jnp_Jv)
3829{
3830 IEMOP_MNEMONIC("jo Jv");
3831 IEMOP_HLP_MIN_386();
3832 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3833 if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
3834 {
3835 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3836 IEMOP_HLP_NO_LOCK_PREFIX();
3837
3838 IEM_MC_BEGIN(0, 0);
3839 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3840 IEM_MC_ADVANCE_RIP();
3841 } IEM_MC_ELSE() {
3842 IEM_MC_REL_JMP_S16(i16Imm);
3843 } IEM_MC_ENDIF();
3844 IEM_MC_END();
3845 }
3846 else
3847 {
3848 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3849 IEMOP_HLP_NO_LOCK_PREFIX();
3850
3851 IEM_MC_BEGIN(0, 0);
3852 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3853 IEM_MC_ADVANCE_RIP();
3854 } IEM_MC_ELSE() {
3855 IEM_MC_REL_JMP_S32(i32Imm);
3856 } IEM_MC_ENDIF();
3857 IEM_MC_END();
3858 }
3859 return VINF_SUCCESS;
3860}
3861
3862
/** Opcode 0x0f 0x8c.
 * JL/JNGE rel16/rel32 - jump when SF != OF (signed less-than). */
FNIEMOP_DEF(iemOp_jl_Jv)
{
    IEMOP_MNEMONIC("jl/jnge Jv");
    IEMOP_HLP_MIN_386();                    /* Jcc with 16/32-bit displacement: 386+. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3897
3898
/** Opcode 0x0f 0x8d.
 * JNL/JGE rel16/rel32 - jump when SF == OF (signed greater-or-equal). */
FNIEMOP_DEF(iemOp_jnl_Jv)
{
    IEMOP_MNEMONIC("jnl/jge Jv");
    IEMOP_HLP_MIN_386();                    /* Jcc with 16/32-bit displacement: 386+. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {    /* Inverted: SF != OF => not taken. */
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3933
3934
/** Opcode 0x0f 0x8e.
 * JLE/JNG rel16/rel32 - jump when ZF is set or SF != OF (signed less-or-equal). */
FNIEMOP_DEF(iemOp_jle_Jv)
{
    IEMOP_MNEMONIC("jle/jng Jv");
    IEMOP_HLP_MIN_386();                    /* Jcc with 16/32-bit displacement: 386+. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3969
3970
/** Opcode 0x0f 0x8f.
 * JNLE/JG rel16/rel32 - jump when ZF is clear and SF == OF (signed greater). */
FNIEMOP_DEF(iemOp_jnle_Jv)
{
    IEMOP_MNEMONIC("jnle/jg Jv");
    IEMOP_HLP_MIN_386();                    /* Jcc with 16/32-bit displacement: 386+. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {  /* Inverted. */
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4005
4006
/** Opcode 0x0f 0x90.
 * SETO Eb - store 1 into the byte operand if OF is set, else 0. */
FNIEMOP_DEF(iemOp_seto_Eb)
{
    IEMOP_MNEMONIC("seto Eb");
    IEMOP_HLP_MIN_386();                    /* SETcc: 386+. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4046
4047
/** Opcode 0x0f 0x91.
 * SETNO Eb - store 1 into the byte operand if OF is clear, else 0. */
FNIEMOP_DEF(iemOp_setno_Eb)
{
    IEMOP_MNEMONIC("setno Eb");
    IEMOP_HLP_MIN_386();                    /* SETcc: 386+. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {    /* Inverted sense: OF set => 0. */
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4087
4088
/** Opcode 0x0f 0x92.
 * SETC/SETB/SETNAE Eb - store 1 into the byte operand if CF is set, else 0. */
FNIEMOP_DEF(iemOp_setc_Eb)
{
    IEMOP_MNEMONIC("setc Eb");
    IEMOP_HLP_MIN_386();                    /* SETcc: 386+. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4128
4129
/** Opcode 0x0f 0x93 - setnc Eb.
 *  Sets the r/m8 destination to 1 if the carry flag (CF) is clear, to 0
 *  otherwise. */
FNIEMOP_DEF(iemOp_setnc_Eb)
{
    IEMOP_MNEMONIC("setnc Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        /* Tests the positive condition (CF set), so the 0/1 stores are
           swapped relative to the mnemonic. */
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4169
4170
/** Opcode 0x0f 0x94 - sete Eb.
 *  Sets the r/m8 destination to 1 if the zero flag (ZF) is set, to 0
 *  otherwise. */
FNIEMOP_DEF(iemOp_sete_Eb)
{
    IEMOP_MNEMONIC("sete Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4210
4211
/** Opcode 0x0f 0x95 - setne Eb.
 *  Sets the r/m8 destination to 1 if the zero flag (ZF) is clear, to 0
 *  otherwise. */
FNIEMOP_DEF(iemOp_setne_Eb)
{
    IEMOP_MNEMONIC("setne Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        /* Tests the positive condition (ZF set), so the 0/1 stores are
           swapped relative to the mnemonic. */
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4251
4252
/** Opcode 0x0f 0x96 - setbe Eb.
 *  Sets the r/m8 destination to 1 if CF or ZF is set (below or equal,
 *  unsigned), to 0 otherwise. */
FNIEMOP_DEF(iemOp_setbe_Eb)
{
    IEMOP_MNEMONIC("setbe Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4292
4293
/** Opcode 0x0f 0x97 - setnbe Eb.
 *  Sets the r/m8 destination to 1 if both CF and ZF are clear (above,
 *  unsigned), to 0 otherwise. */
FNIEMOP_DEF(iemOp_setnbe_Eb)
{
    IEMOP_MNEMONIC("setnbe Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        /* Tests the positive condition (CF or ZF set), so the 0/1 stores are
           swapped relative to the mnemonic. */
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4333
4334
/** Opcode 0x0f 0x98 - sets Eb.
 *  Sets the r/m8 destination to 1 if the sign flag (SF) is set, to 0
 *  otherwise. */
FNIEMOP_DEF(iemOp_sets_Eb)
{
    IEMOP_MNEMONIC("sets Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4374
4375
/** Opcode 0x0f 0x99 - setns Eb.
 *  Sets the r/m8 destination to 1 if the sign flag (SF) is clear, to 0
 *  otherwise. */
FNIEMOP_DEF(iemOp_setns_Eb)
{
    IEMOP_MNEMONIC("setns Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        /* Tests the positive condition (SF set), so the 0/1 stores are
           swapped relative to the mnemonic. */
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4415
4416
4417/** Opcode 0x0f 0x9a. */
4418FNIEMOP_DEF(iemOp_setp_Eb)
4419{
4420 IEMOP_MNEMONIC("setnp Eb");
4421 IEMOP_HLP_MIN_386();
4422 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4423 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */
4424
4425 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4426 * any way. AMD says it's "unused", whatever that means. We're
4427 * ignoring for now. */
4428 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4429 {
4430 /* register target */
4431 IEM_MC_BEGIN(0, 0);
4432 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4433 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
4434 } IEM_MC_ELSE() {
4435 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
4436 } IEM_MC_ENDIF();
4437 IEM_MC_ADVANCE_RIP();
4438 IEM_MC_END();
4439 }
4440 else
4441 {
4442 /* memory target */
4443 IEM_MC_BEGIN(0, 1);
4444 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4445 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4446 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4447 IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
4448 } IEM_MC_ELSE() {
4449 IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
4450 } IEM_MC_ENDIF();
4451 IEM_MC_ADVANCE_RIP();
4452 IEM_MC_END();
4453 }
4454 return VINF_SUCCESS;
4455}
4456
4457
/** Opcode 0x0f 0x9b - setnp Eb.
 *  Sets the r/m8 destination to 1 if the parity flag (PF) is clear, to 0
 *  otherwise. */
FNIEMOP_DEF(iemOp_setnp_Eb)
{
    IEMOP_MNEMONIC("setnp Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        /* Tests the positive condition (PF set), so the 0/1 stores are
           swapped relative to the mnemonic. */
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4497
4498
/** Opcode 0x0f 0x9c - setl Eb.
 *  Sets the r/m8 destination to 1 if SF != OF (less, signed), to 0
 *  otherwise. */
FNIEMOP_DEF(iemOp_setl_Eb)
{
    IEMOP_MNEMONIC("setl Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4538
4539
/** Opcode 0x0f 0x9d - setnl Eb.
 *  Sets the r/m8 destination to 1 if SF == OF (not less / greater or equal,
 *  signed), to 0 otherwise. */
FNIEMOP_DEF(iemOp_setnl_Eb)
{
    IEMOP_MNEMONIC("setnl Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        /* Tests the positive condition (SF != OF), so the 0/1 stores are
           swapped relative to the mnemonic. */
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4579
4580
/** Opcode 0x0f 0x9e - setle Eb.
 *  Sets the r/m8 destination to 1 if ZF is set or SF != OF (less or equal,
 *  signed), to 0 otherwise. */
FNIEMOP_DEF(iemOp_setle_Eb)
{
    IEMOP_MNEMONIC("setle Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4620
4621
/** Opcode 0x0f 0x9f - setnle Eb.
 *  Sets the r/m8 destination to 1 if ZF is clear and SF == OF (greater,
 *  signed), to 0 otherwise. */
FNIEMOP_DEF(iemOp_setnle_Eb)
{
    IEMOP_MNEMONIC("setnle Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        /* Tests the positive "less or equal" condition, so the 0/1 stores
           are swapped relative to the mnemonic. */
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4661
4662
/**
 * Common 'push segment-register' helper.
 *
 * Pushes the selector value of the given segment register onto the stack
 * using the current effective operand size.
 *
 * @param   iReg    The segment register to push (X86_SREG_XXX).
 *
 * @note    Pushing ES/CS/SS/DS (iReg < X86_SREG_FS) is disallowed in 64-bit
 *          mode (IEMOP_HLP_NO_64BIT); FS/GS remain valid there, where the
 *          default operand size is 64-bit (IEMOP_HLP_DEFAULT_64BIT_OP_SIZE).
 */
FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (iReg < X86_SREG_FS)
        IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Value);
            IEM_MC_FETCH_SREG_U16(u16Value, iReg);
            IEM_MC_PUSH_U16(u16Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
            /* NOTE(review): uses the dedicated SREG push variant rather than
               IEM_MC_PUSH_U32 - presumably to model CPUs that only write the
               low 16 bits of the stack slot; confirm in the MC implementation. */
            IEM_MC_PUSH_U32_SREG(u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
            IEM_MC_PUSH_U64(u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
4705
4706
/** Opcode 0x0f 0xa0 - push fs.
 *  Pushes the FS selector via the common segment-register push helper. */
FNIEMOP_DEF(iemOp_push_fs)
{
    IEMOP_MNEMONIC("push fs");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_NO_LOCK_PREFIX();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
}
4715
4716
/** Opcode 0x0f 0xa1 - pop fs.
 *  Defers to the C implementation (iemCImpl_pop_Sreg) with the current
 *  effective operand size. */
FNIEMOP_DEF(iemOp_pop_fs)
{
    IEMOP_MNEMONIC("pop fs");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pIemCpu->enmEffOpSize);
}
4725
4726
/** Opcode 0x0f 0xa2 - cpuid.
 *  Defers to the C implementation (iemCImpl_cpuid). */
FNIEMOP_DEF(iemOp_cpuid)
{
    IEMOP_MNEMONIC("cpuid");
    IEMOP_HLP_MIN_486(); /* not all 486es. */
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
}
4735
4736
4737/**
4738 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
4739 * iemOp_bts_Ev_Gv.
4740 */
4741FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
4742{
4743 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4744 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
4745
4746 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4747 {
4748 /* register destination. */
4749 IEMOP_HLP_NO_LOCK_PREFIX();
4750 switch (pIemCpu->enmEffOpSize)
4751 {
4752 case IEMMODE_16BIT:
4753 IEM_MC_BEGIN(3, 0);
4754 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
4755 IEM_MC_ARG(uint16_t, u16Src, 1);
4756 IEM_MC_ARG(uint32_t *, pEFlags, 2);
4757
4758 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4759 IEM_MC_AND_LOCAL_U16(u16Src, 0xf);
4760 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
4761 IEM_MC_REF_EFLAGS(pEFlags);
4762 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
4763
4764 IEM_MC_ADVANCE_RIP();
4765 IEM_MC_END();
4766 return VINF_SUCCESS;
4767
4768 case IEMMODE_32BIT:
4769 IEM_MC_BEGIN(3, 0);
4770 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
4771 IEM_MC_ARG(uint32_t, u32Src, 1);
4772 IEM_MC_ARG(uint32_t *, pEFlags, 2);
4773
4774 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4775 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
4776 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
4777 IEM_MC_REF_EFLAGS(pEFlags);
4778 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
4779
4780 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
4781 IEM_MC_ADVANCE_RIP();
4782 IEM_MC_END();
4783 return VINF_SUCCESS;
4784
4785 case IEMMODE_64BIT:
4786 IEM_MC_BEGIN(3, 0);
4787 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
4788 IEM_MC_ARG(uint64_t, u64Src, 1);
4789 IEM_MC_ARG(uint32_t *, pEFlags, 2);
4790
4791 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4792 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
4793 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
4794 IEM_MC_REF_EFLAGS(pEFlags);
4795 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
4796
4797 IEM_MC_ADVANCE_RIP();
4798 IEM_MC_END();
4799 return VINF_SUCCESS;
4800
4801 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4802 }
4803 }
4804 else
4805 {
4806 /* memory destination. */
4807
4808 uint32_t fAccess;
4809 if (pImpl->pfnLockedU16)
4810 fAccess = IEM_ACCESS_DATA_RW;
4811 else /* BT */
4812 {
4813 IEMOP_HLP_NO_LOCK_PREFIX();
4814 fAccess = IEM_ACCESS_DATA_R;
4815 }
4816
4817 NOREF(fAccess);
4818
4819 /** @todo test negative bit offsets! */
4820 switch (pIemCpu->enmEffOpSize)
4821 {
4822 case IEMMODE_16BIT:
4823 IEM_MC_BEGIN(3, 2);
4824 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
4825 IEM_MC_ARG(uint16_t, u16Src, 1);
4826 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
4827 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4828 IEM_MC_LOCAL(int16_t, i16AddrAdj);
4829
4830 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4831 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4832 IEM_MC_ASSIGN(i16AddrAdj, u16Src);
4833 IEM_MC_AND_ARG_U16(u16Src, 0x0f);
4834 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
4835 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 1);
4836 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
4837 IEM_MC_FETCH_EFLAGS(EFlags);
4838
4839 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
4840 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
4841 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
4842 else
4843 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
4844 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
4845
4846 IEM_MC_COMMIT_EFLAGS(EFlags);
4847 IEM_MC_ADVANCE_RIP();
4848 IEM_MC_END();
4849 return VINF_SUCCESS;
4850
4851 case IEMMODE_32BIT:
4852 IEM_MC_BEGIN(3, 2);
4853 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
4854 IEM_MC_ARG(uint32_t, u32Src, 1);
4855 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
4856 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4857 IEM_MC_LOCAL(int32_t, i32AddrAdj);
4858
4859 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4860 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4861 IEM_MC_ASSIGN(i32AddrAdj, u32Src);
4862 IEM_MC_AND_ARG_U32(u32Src, 0x1f);
4863 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
4864 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
4865 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
4866 IEM_MC_FETCH_EFLAGS(EFlags);
4867
4868 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
4869 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
4870 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
4871 else
4872 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
4873 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
4874
4875 IEM_MC_COMMIT_EFLAGS(EFlags);
4876 IEM_MC_ADVANCE_RIP();
4877 IEM_MC_END();
4878 return VINF_SUCCESS;
4879
4880 case IEMMODE_64BIT:
4881 IEM_MC_BEGIN(3, 2);
4882 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
4883 IEM_MC_ARG(uint64_t, u64Src, 1);
4884 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
4885 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4886 IEM_MC_LOCAL(int64_t, i64AddrAdj);
4887
4888 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4889 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4890 IEM_MC_ASSIGN(i64AddrAdj, u64Src);
4891 IEM_MC_AND_ARG_U64(u64Src, 0x3f);
4892 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
4893 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
4894 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
4895 IEM_MC_FETCH_EFLAGS(EFlags);
4896
4897 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
4898 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
4899 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
4900 else
4901 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
4902 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
4903
4904 IEM_MC_COMMIT_EFLAGS(EFlags);
4905 IEM_MC_ADVANCE_RIP();
4906 IEM_MC_END();
4907 return VINF_SUCCESS;
4908
4909 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4910 }
4911 }
4912}
4913
4914
4915/** Opcode 0x0f 0xa3. */
4916FNIEMOP_DEF(iemOp_bt_Ev_Gv)
4917{
4918 IEMOP_MNEMONIC("bt Gv,Gv");
4919 IEMOP_HLP_MIN_386();
4920 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
4921}
4922
4923
/**
 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
 *
 * Double-precision shift with an immediate shift count (Ev,Gv,Ib forms).
 * The destination is either a general register or memory; the second operand
 * register supplies the bits shifted in, and the imm8 gives the count.
 *
 * @param   pImpl   Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register destination: the imm8 follows modrm immediately. */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEMOP_HLP_NO_LOCK_PREFIX();

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                0);
                IEM_MC_ARG(uint16_t,        u16Src,                 1);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, /*=*/cShift, 2);
                IEM_MC_ARG(uint32_t *,      pEFlags,                3);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                0);
                IEM_MC_ARG(uint32_t,        u32Src,                 1);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, /*=*/cShift, 2);
                IEM_MC_ARG(uint32_t *,      pEFlags,                3);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                0);
                IEM_MC_ARG(uint64_t,        u64Src,                 1);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, /*=*/cShift, 2);
                IEM_MC_ARG(uint32_t *,      pEFlags,                3);

                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint16_t *,              pu16Dst,                0);
                IEM_MC_ARG(uint16_t,                u16Src,                 1);
                IEM_MC_ARG(uint8_t,                 cShiftArg,              2);
                IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags,        3);
                IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);

                /* The '1' accounts for the imm8 byte that follows the
                   addressing bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint32_t *,              pu32Dst,                0);
                IEM_MC_ARG(uint32_t,                u32Src,                 1);
                IEM_MC_ARG(uint8_t,                 cShiftArg,              2);
                IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags,        3);
                IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint64_t *,              pu64Dst,                0);
                IEM_MC_ARG(uint64_t,                u64Src,                 1);
                IEM_MC_ARG(uint8_t,                 cShiftArg,              2);
                IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags,        3);
                IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
5068
5069
5070/**
5071 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
5072 */
5073FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
5074{
5075 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5076 IEMOP_HLP_NO_LOCK_PREFIX();
5077 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
5078
5079 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5080 {
5081 IEMOP_HLP_NO_LOCK_PREFIX();
5082
5083 switch (pIemCpu->enmEffOpSize)
5084 {
5085 case IEMMODE_16BIT:
5086 IEM_MC_BEGIN(4, 0);
5087 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5088 IEM_MC_ARG(uint16_t, u16Src, 1);
5089 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5090 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5091
5092 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
5093 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
5094 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5095 IEM_MC_REF_EFLAGS(pEFlags);
5096 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5097
5098 IEM_MC_ADVANCE_RIP();
5099 IEM_MC_END();
5100 return VINF_SUCCESS;
5101
5102 case IEMMODE_32BIT:
5103 IEM_MC_BEGIN(4, 0);
5104 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5105 IEM_MC_ARG(uint32_t, u32Src, 1);
5106 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5107 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5108
5109 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
5110 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
5111 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5112 IEM_MC_REF_EFLAGS(pEFlags);
5113 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5114
5115 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5116 IEM_MC_ADVANCE_RIP();
5117 IEM_MC_END();
5118 return VINF_SUCCESS;
5119
5120 case IEMMODE_64BIT:
5121 IEM_MC_BEGIN(4, 0);
5122 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5123 IEM_MC_ARG(uint64_t, u64Src, 1);
5124 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5125 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5126
5127 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
5128 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
5129 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5130 IEM_MC_REF_EFLAGS(pEFlags);
5131 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5132
5133 IEM_MC_ADVANCE_RIP();
5134 IEM_MC_END();
5135 return VINF_SUCCESS;
5136
5137 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5138 }
5139 }
5140 else
5141 {
5142 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */
5143
5144 switch (pIemCpu->enmEffOpSize)
5145 {
5146 case IEMMODE_16BIT:
5147 IEM_MC_BEGIN(4, 2);
5148 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5149 IEM_MC_ARG(uint16_t, u16Src, 1);
5150 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5151 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5152 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5153
5154 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5155 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
5156 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5157 IEM_MC_FETCH_EFLAGS(EFlags);
5158 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
5159 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5160
5161 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
5162 IEM_MC_COMMIT_EFLAGS(EFlags);
5163 IEM_MC_ADVANCE_RIP();
5164 IEM_MC_END();
5165 return VINF_SUCCESS;
5166
5167 case IEMMODE_32BIT:
5168 IEM_MC_BEGIN(4, 2);
5169 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5170 IEM_MC_ARG(uint32_t, u32Src, 1);
5171 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5172 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5173 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5174
5175 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5176 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
5177 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5178 IEM_MC_FETCH_EFLAGS(EFlags);
5179 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
5180 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5181
5182 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
5183 IEM_MC_COMMIT_EFLAGS(EFlags);
5184 IEM_MC_ADVANCE_RIP();
5185 IEM_MC_END();
5186 return VINF_SUCCESS;
5187
5188 case IEMMODE_64BIT:
5189 IEM_MC_BEGIN(4, 2);
5190 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5191 IEM_MC_ARG(uint64_t, u64Src, 1);
5192 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5193 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5194 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5195
5196 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5197 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
5198 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5199 IEM_MC_FETCH_EFLAGS(EFlags);
5200 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
5201 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5202
5203 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
5204 IEM_MC_COMMIT_EFLAGS(EFlags);
5205 IEM_MC_ADVANCE_RIP();
5206 IEM_MC_END();
5207 return VINF_SUCCESS;
5208
5209 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5210 }
5211 }
5212}
5213
5214
5215
/** Opcode 0x0f 0xa4 - shld Ev,Gv,Ib (immediate shift count). */
FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
{
    IEMOP_MNEMONIC("shld Ev,Gv,Ib");
    IEMOP_HLP_MIN_386(); /* SHLD was introduced with the 80386. */
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shld);
}
5223
5224
/** Opcode 0x0f 0xa5 - shld Ev,Gv,CL (shift count from CL). */
FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
{
    IEMOP_MNEMONIC("shld Ev,Gv,CL");
    IEMOP_HLP_MIN_386(); /* SHLD was introduced with the 80386. */
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shld);
}
5232
5233
/** Opcode 0x0f 0xa8 - push gs. */
FNIEMOP_DEF(iemOp_push_gs)
{
    IEMOP_MNEMONIC("push gs");
    IEMOP_HLP_MIN_386(); /* GS exists only on 386+. */
    IEMOP_HLP_NO_LOCK_PREFIX();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
}
5242
5243
/** Opcode 0x0f 0xa9 - pop gs. */
FNIEMOP_DEF(iemOp_pop_gs)
{
    IEMOP_MNEMONIC("pop gs");
    IEMOP_HLP_MIN_386(); /* GS exists only on 386+. */
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* Segment register loads need full C-level handling (descriptor checks etc.). */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pIemCpu->enmEffOpSize);
}
5252
5253
/** Opcode 0x0f 0xaa - rsm (resume from SMM); not implemented yet. */
FNIEMOP_STUB(iemOp_rsm);
//IEMOP_HLP_MIN_386();
5257
5258
/** Opcode 0x0f 0xab - bts Ev,Gv (bit test and set). */
FNIEMOP_DEF(iemOp_bts_Ev_Gv)
{
    IEMOP_MNEMONIC("bts Ev,Gv");
    IEMOP_HLP_MIN_386(); /* BTS was introduced with the 80386. */
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
}
5266
5267
/** Opcode 0x0f 0xac - shrd Ev,Gv,Ib (immediate shift count). */
FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
{
    IEMOP_MNEMONIC("shrd Ev,Gv,Ib");
    IEMOP_HLP_MIN_386(); /* SHRD was introduced with the 80386. */
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shrd);
}
5275
5276
/** Opcode 0x0f 0xad - shrd Ev,Gv,CL (shift count from CL). */
FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
{
    IEMOP_MNEMONIC("shrd Ev,Gv,CL");
    IEMOP_HLP_MIN_386(); /* SHRD was introduced with the 80386. */
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shrd);
}
5284
5285
/**
 * Opcode 0x0f 0xae mem/0 - fxsave m512.
 * Raises \#UD when the guest CPU profile lacks FXSAVE/FXRSTOR support;
 * otherwise defers to the C implementation.
 */
FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fxsave m512");
    if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fFxSaveRstor)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
    IEM_MC_END();
    return VINF_SUCCESS;
}
5304
5305
/**
 * Opcode 0x0f 0xae mem/1 - fxrstor m512.
 * Raises \#UD when the guest CPU profile lacks FXSAVE/FXRSTOR support;
 * otherwise defers to the C implementation.
 */
FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fxrstor m512");
    if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fFxSaveRstor)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
    IEM_MC_END();
    return VINF_SUCCESS;
}
5324
5325
/** Opcode 0x0f 0xae mem/2 - ldmxcsr; not implemented yet. */
FNIEMOP_STUB_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/3 - stmxcsr; not implemented yet. */
FNIEMOP_STUB_1(iemOp_Grp15_stmxcsr, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/4 - xsave; decodes to \#UD for now. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_xsave, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/5 - xrstor; decodes to \#UD for now. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_xrstor, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/6 - xsaveopt; decodes to \#UD for now. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/7 - clflush; not implemented yet. */
FNIEMOP_STUB_1(iemOp_Grp15_clflush, uint8_t, bRm);
5343
5344
/**
 * Opcode 0x0f 0xae 11b/5 - lfence.
 * Requires SSE2 on the guest; uses a real LFENCE on SSE2-capable hosts,
 * otherwise falls back to the generic memory-fence helper.
 */
FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
{
    IEMOP_MNEMONIC("lfence");
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fSse2)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(0, 0);
    if (IEM_GET_HOST_CPU_FEATURES(pIemCpu)->fSse2)
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
5362
5363
/**
 * Opcode 0x0f 0xae 11b/6 - mfence.
 * Requires SSE2 on the guest; uses a real MFENCE on SSE2-capable hosts,
 * otherwise falls back to the generic memory-fence helper.
 */
FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
{
    IEMOP_MNEMONIC("mfence");
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fSse2)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(0, 0);
    if (IEM_GET_HOST_CPU_FEATURES(pIemCpu)->fSse2)
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
5381
5382
/**
 * Opcode 0x0f 0xae 11b/7 - sfence.
 * NOTE(review): SFENCE is an SSE (not SSE2) instruction on real hardware,
 * but this gates it on the SSE2 feature flag like l/mfence - confirm intent.
 */
FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
{
    IEMOP_MNEMONIC("sfence");
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fSse2)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(0, 0);
    if (IEM_GET_HOST_CPU_FEATURES(pIemCpu)->fSse2)
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
5400
5401
/** Opcode 0xf3 0x0f 0xae 11b/0 - rdfsbase; decodes to \#UD for now. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_rdfsbase, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xae 11b/1 - rdgsbase; decodes to \#UD for now. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_rdgsbase, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xae 11b/2 - wrfsbase; decodes to \#UD for now. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_wrfsbase, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xae 11b/3 - wrgsbase; decodes to \#UD for now. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_wrgsbase, uint8_t, bRm);
5413
5414
/**
 * Opcode 0x0f 0xae - group 15 dispatcher.
 *
 * Memory forms select by the reg field; register (11b) forms additionally
 * select by prefix: no prefix gives the fence instructions, F3 gives the
 * FS/GS base instructions, anything else is \#UD.
 */
FNIEMOP_DEF(iemOp_Grp15)
{
    IEMOP_HLP_MIN_586(); /* Not entirely accurate nor needed, but useful for debugging 286 code. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Memory operand forms. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_Grp15_fxsave,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_Grp15_fxrstor, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_Grp15_ldmxcsr, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_Grp15_stmxcsr, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_Grp15_xsave,   bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_Grp15_xrstor,  bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_Grp15_xsaveopt,bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_Grp15_clflush, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Register (11b) forms - dispatch on the prefix combination. */
        switch (pIemCpu->fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_LOCK))
        {
            case 0:
                switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
                {
                    case 0: return IEMOP_RAISE_INVALID_OPCODE();
                    case 1: return IEMOP_RAISE_INVALID_OPCODE();
                    case 2: return IEMOP_RAISE_INVALID_OPCODE();
                    case 3: return IEMOP_RAISE_INVALID_OPCODE();
                    case 4: return IEMOP_RAISE_INVALID_OPCODE();
                    case 5: return FNIEMOP_CALL_1(iemOp_Grp15_lfence, bRm);
                    case 6: return FNIEMOP_CALL_1(iemOp_Grp15_mfence, bRm);
                    case 7: return FNIEMOP_CALL_1(iemOp_Grp15_sfence, bRm);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reached - every inner case returns */

            case IEM_OP_PRF_REPZ:
                switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
                {
                    case 0: return FNIEMOP_CALL_1(iemOp_Grp15_rdfsbase, bRm);
                    case 1: return FNIEMOP_CALL_1(iemOp_Grp15_rdgsbase, bRm);
                    case 2: return FNIEMOP_CALL_1(iemOp_Grp15_wrfsbase, bRm);
                    case 3: return FNIEMOP_CALL_1(iemOp_Grp15_wrgsbase, bRm);
                    case 4: return IEMOP_RAISE_INVALID_OPCODE();
                    case 5: return IEMOP_RAISE_INVALID_OPCODE();
                    case 6: return IEMOP_RAISE_INVALID_OPCODE();
                    case 7: return IEMOP_RAISE_INVALID_OPCODE();
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reached - every inner case returns */

            default:
                return IEMOP_RAISE_INVALID_OPCODE();
        }
    }
}
5474
5475
/** Opcode 0x0f 0xaf - imul Gv,Ev (two-operand signed multiply). */
FNIEMOP_DEF(iemOp_imul_Gv_Ev)
{
    IEMOP_MNEMONIC("imul Gv,Ev");
    IEMOP_HLP_MIN_386(); /* the two-operand IMUL form is 386+ */
    /* SF, ZF, AF and PF are left undefined by IMUL. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_imul_two);
}
5484
5485
/**
 * Opcode 0x0f 0xb0 - cmpxchg Eb,Gb.
 *
 * Compares AL with the destination; the assembly worker updates the
 * destination and/or AL (via pu8Al) and EFLAGS accordingly.  The locked
 * worker is selected when a LOCK prefix is present.
 */
FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
{
    IEMOP_MNEMONIC("cmpxchg Eb,Gb");
    IEMOP_HLP_MIN_486(); /* CMPXCHG was introduced with the 80486. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register destination. */
        IEMOP_HLP_DONE_DECODING();
        IEM_MC_BEGIN(4, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t *, pu8Al, 1);
        IEM_MC_ARG(uint8_t, u8Src, 2);
        IEM_MC_ARG(uint32_t *, pEFlags, 3);

        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* Memory destination: work on a local AL copy and store it back after. */
        IEM_MC_BEGIN(4, 3);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t *, pu8Al, 1);
        IEM_MC_ARG(uint8_t, u8Src, 2);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(uint8_t, u8Al);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING();
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_REF_LOCAL(pu8Al, u8Al);
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        /* Write the (possibly updated) AL copy back to the register. */
        IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5544
/**
 * Opcode 0x0f 0xb1 - cmpxchg Ev,Gv.
 *
 * Word/dword/qword variant of cmpxchg; compares {e,r}AX with the destination.
 * On 32-bit hosts (RT_ARCH_X86) the 64-bit source is passed by reference
 * because a uint64_t cannot be passed in a single register there.
 */
FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
{
    IEMOP_MNEMONIC("cmpxchg Ev,Gv");
    IEMOP_HLP_MIN_486(); /* CMPXCHG was introduced with the 80486. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register destination. */
        IEMOP_HLP_DONE_DECODING();
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Ax, 1);
                IEM_MC_ARG(uint16_t, u16Src, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Eax, 1);
                IEM_MC_ARG(uint32_t, u32Src, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);

                /* 32-bit writes clear the upper halves in long mode. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Rax, 1);
#ifdef RT_ARCH_X86
                IEM_MC_ARG(uint64_t *, pu64Src, 2);
#else
                IEM_MC_ARG(uint64_t, u64Src, 2);
#endif
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
#ifdef RT_ARCH_X86
                IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
#else
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
#endif

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory destination: work on a local {e,r}AX copy and store it back after. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Ax, 1);
                IEM_MC_ARG(uint16_t, u16Src, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint16_t, u16Ax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Eax, 1);
                IEM_MC_ARG(uint32_t, u32Src, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint32_t, u32Eax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Rax, 1);
#ifdef RT_ARCH_X86
                IEM_MC_ARG(uint64_t *, pu64Src, 2);
#else
                IEM_MC_ARG(uint64_t, u64Src, 2);
#endif
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint64_t, u64Rax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
#ifdef RT_ARCH_X86
                IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
#else
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
#endif

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
5738
5739
/**
 * Common worker for lds/les/lss/lfs/lgs: loads a far pointer from memory
 * into a segment register + general register pair.
 *
 * The offset part is read at GCPtrEff and the 16-bit selector right after it
 * (displacement 2/4/8 depending on the effective operand size).
 *
 * @param   iSegReg     The destination segment register (X86_SREG_XXX).
 * @param   bRm         The ModR/M byte; must denote a memory operand.
 */
FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
{
    Assert((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)); /* Caller checks this */
    uint8_t const iGReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg;

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t, uSel, 0);
            IEM_MC_ARG(uint16_t, offSeg, 1);
            IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
            IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16(offSeg, pIemCpu->iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pIemCpu->iEffSeg, GCPtrEff, 2); /* selector follows the 16-bit offset */
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t, uSel, 0);
            IEM_MC_ARG(uint32_t, offSeg, 1);
            IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
            IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32(offSeg, pIemCpu->iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pIemCpu->iEffSeg, GCPtrEff, 4); /* selector follows the 32-bit offset */
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t, uSel, 0);
            IEM_MC_ARG(uint64_t, offSeg, 1);
            IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
            IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            if (IEM_IS_GUEST_CPU_AMD(pIemCpu)) /** @todo testcase: rev 3.15 of the amd manuals claims it only loads a 32-bit greg. */
                IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pIemCpu->iEffSeg, GCPtrEff);
            else
                IEM_MC_FETCH_MEM_U64(offSeg, pIemCpu->iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pIemCpu->iEffSeg, GCPtrEff, 8); /* selector follows the 64-bit offset */
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
5801
5802
/** Opcode 0x0f 0xb2 - lss Gv,Mp; register form is \#UD. */
FNIEMOP_DEF(iemOp_lss_Gv_Mp)
{
    IEMOP_MNEMONIC("lss Gv,Mp");
    IEMOP_HLP_MIN_386(); /* LSS was introduced with the 80386. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE();
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
}
5813
5814
5815/** Opcode 0x0f 0xb3. */
5816FNIEMOP_DEF(iemOp_btr_Ev_Gv)
5817{
5818 IEMOP_MNEMONIC("btr Ev,Gv");
5819 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
5820}
5821
5822
/** Opcode 0x0f 0xb4 - lfs Gv,Mp; register form is \#UD. */
FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
{
    IEMOP_MNEMONIC("lfs Gv,Mp");
    IEMOP_HLP_MIN_386(); /* LFS was introduced with the 80386. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE();
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
}
5833
5834
/** Opcode 0x0f 0xb5 - lgs Gv,Mp; register form is \#UD. */
FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
{
    IEMOP_MNEMONIC("lgs Gv,Mp");
    IEMOP_HLP_MIN_386(); /* LGS was introduced with the 80386. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE();
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
}
5845
5846
/**
 * Opcode 0x0f 0xb6 - movzx Gv,Eb.
 * Zero-extends a byte source (register or memory) into a 16/32/64-bit
 * destination register.
 */
FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
{
    IEMOP_MNEMONIC("movzx Gv,Eb");
    IEMOP_HLP_MIN_386(); /* MOVZX was introduced with the 80386. */

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
5937
5938
/**
 * Opcode 0x0f 0xb7 - movzx Gv,Ew.
 * Zero-extends a word source (register or memory) into a 32/64-bit
 * destination register (16-bit and 32-bit operand sizes both yield a
 * 32-bit store here).
 */
FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
{
    IEMOP_MNEMONIC("movzx Gv,Ew");
    IEMOP_HLP_MIN_386(); /* MOVZX was introduced with the 80386. */

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /** @todo Not entirely sure how the operand size prefix is handled here,
     *        assuming that it will be ignored. Would be nice to have a few
     *        test for this. */
    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}
6005
6006
/** Opcode 0x0f 0xb8 - popcnt Gv,Ev (F3 prefix) / jmpe (IA-64). Not implemented yet. */
FNIEMOP_STUB(iemOp_popcnt_Gv_Ev_jmpe);
6009
6010
/**
 * Opcode 0x0f 0xb9 - group 10 (ud1).
 *
 * Reserved/invalid opcode group; always raises \#UD.
 */
FNIEMOP_DEF(iemOp_Grp10)
{
    Log(("iemOp_Grp10 -> #UD\n"));
    return IEMOP_RAISE_INVALID_OPCODE();
}
6017
6018
/**
 * Opcode 0x0f 0xba - group 8: bt/bts/btr/btc Ev,Ib.
 *
 * The ModR/M reg field selects the operation (/4=bt, /5=bts, /6=btr,
 * /7=btc; /0../3 raise \#UD).  The bit-offset immediate follows the
 * ModR/M + displacement bytes, so in the memory forms it is fetched
 * after the effective address has been calculated.
 */
FNIEMOP_DEF(iemOp_Grp8)
{
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPBINSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: case 1: case 2: case 3:
            return IEMOP_RAISE_INVALID_OPCODE();
        case 4: pImpl = &g_iemAImpl_bt;  IEMOP_MNEMONIC("bt  Ev,Ib"); break;
        case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC("bts Ev,Ib"); break;
        case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC("btr Ev,Ib"); break;
        case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC("btc Ev,Ib"); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register destination. */
        uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
        IEMOP_HLP_NO_LOCK_PREFIX();

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                /* Register forms mask the immediate to the operand width. */
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u8Bit & 0x0f, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u8Bit & 0x1f, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u8Bit & 0x3f, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory destination. */

        /* bt only reads, the rest are read-modify-write and may take LOCK. */
        uint32_t fAccess;
        if (pImpl->pfnLockedU16)
            fAccess = IEM_ACCESS_DATA_RW;
        else /* BT */
        {
            IEMOP_HLP_NO_LOCK_PREFIX();
            fAccess = IEM_ACCESS_DATA_R;
        }

        /** @todo test negative bit offsets! */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* Note: 1 byte of immediate follows the ModR/M encoding. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
                IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
                IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
                IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

}
6181
6182
/**
 * Opcode 0x0f 0xbb - btc Ev,Gv.
 *
 * Bit test and complement; shares the common bit-op worker with bt/bts/btr.
 */
FNIEMOP_DEF(iemOp_btc_Ev_Gv)
{
    IEMOP_MNEMONIC("btc Ev,Gv");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
}
6190
6191
/**
 * Opcode 0x0f 0xbc - bsf Gv,Ev.
 *
 * Bit scan forward.  All flags except ZF are architecturally undefined,
 * hence the verification exemption below.
 */
FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
{
    IEMOP_MNEMONIC("bsf Gv,Ev");
    IEMOP_HLP_MIN_386();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsf);
}
6200
6201
/**
 * Opcode 0x0f 0xbd - bsr Gv,Ev.
 *
 * Bit scan reverse.  All flags except ZF are architecturally undefined,
 * hence the verification exemption below.
 */
FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
{
    IEMOP_MNEMONIC("bsr Gv,Ev");
    IEMOP_HLP_MIN_386();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsr);
}
6210
6211
/**
 * Opcode 0x0f 0xbe - movsx Gv,Eb.
 *
 * Sign extends a byte register or memory operand into a 16-, 32- or
 * 64-bit general register, selected by the effective operand size.
 */
FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
{
    IEMOP_MNEMONIC("movsx Gv,Eb");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6302
6303
/**
 * Opcode 0x0f 0xbf - movsx Gv,Ew.
 *
 * Sign extends a 16-bit register or memory operand into the destination
 * general register.  Non-64-bit operand sizes are all handled via the
 * 32-bit path; REX.W selects the 64-bit path.
 */
FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
{
    IEMOP_MNEMONIC("movsx Gv,Ew");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /** @todo Not entirely sure how the operand size prefix is handled here,
     *        assuming that it will be ignored. Would be nice to have a few
     *        test for this. */
    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}
6370
6371
/**
 * Opcode 0x0f 0xc0 - xadd Eb,Gb.
 *
 * Exchange and add, byte variant.  For the memory form the destination is
 * mapped read-write and the register copy is written back only after the
 * memory commit; LOCK selects the locked worker.
 */
FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_MIN_486();
    IEMOP_MNEMONIC("xadd Eb,Gb");

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t *, pu8Reg, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        /* Note: falls through to the common return below. */
    }
    else
    {
        /*
         * We're accessing memory.
         */
        IEM_MC_BEGIN(3, 3);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t *, pu8Reg, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(uint8_t, u8RegCopy);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U8(u8RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
        IEM_MC_FETCH_EFLAGS(EFlags);
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u8RegCopy);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS; /* (redundant with the return below, kept as-is) */
    }
    return VINF_SUCCESS;
}
6430
6431
/**
 * Opcode 0x0f 0xc1 - xadd Ev,Gv.
 *
 * Exchange and add, word/dword/qword variant.  Same structure as the byte
 * form: register destination uses direct register references, while the
 * memory destination maps the operand read-write, works on a copy of the
 * source register, and stores that copy back after the memory commit.
 */
FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
{
    IEMOP_MNEMONIC("xadd Ev,Gv");
    IEMOP_HLP_MIN_486();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX();

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Reg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Reg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);

                /* Both 32-bit register operands get their high dwords cleared. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Reg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Reg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16RegCopy);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U16(u16RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Reg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32RegCopy);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U32(u32RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Reg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64RegCopy);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U64(u64RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6584
/** Opcode 0x0f 0xc2 - cmpps/cmppd/cmpss/cmpsd Vx,Wx,Ib (prefix-selected). Not implemented yet. */
FNIEMOP_STUB(iemOp_cmpps_Vps_Wps_Ib__cmppd_Vpd_Wpd_Ib__cmpss_Vss_Wss_Ib__cmpsd_Vsd_Wsd_Ib);
6587
6588
/** Opcode 0x0f 0xc3 - movnti My,Gy.
 *
 * Non-temporal store of a general register to memory (SSE2).  Only the
 * register -> memory form is valid; mod=3 raises \#UD.  The SSE2 feature
 * check is done after decoding is complete so decode exceptions win.
 */
#if 1 //ndef VBOX_WITH_REM /** @todo figure out why some/all of these instructions is upsetting things */
FNIEMOP_DEF(iemOp_movnti_My_Gy)
{
    IEMOP_MNEMONIC("movnti My,Gy");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /* Only the register -> memory form makes sense, assuming #UD for the other form. */
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fSse2)
                    return IEMOP_RAISE_INVALID_OPCODE();

                /* Note: the non-temporal hint is not modelled; a plain store is performed. */
                IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrEffDst, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fSse2)
                    return IEMOP_RAISE_INVALID_OPCODE();

                IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffDst, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_16BIT:
                /** @todo check this form.   */
                return IEMOP_RAISE_INVALID_OPCODE();
        }
    }
    else
        return IEMOP_RAISE_INVALID_OPCODE();
    return VINF_SUCCESS;
}
#else
FNIEMOP_STUB(iemOp_movnti_My_Gy); // solaris 10 uses this in hat_pte_zero().
#endif
6646
6647
/** Opcode 0x0f 0xc4 - pinsrw (MMX/SSE forms). Not implemented yet. */
FNIEMOP_STUB(iemOp_pinsrw_Pq_Ry_Mw_Ib__pinsrw_Vdq_Ry_Mw_Ib);

/** Opcode 0x0f 0xc5 - pextrw (MMX/SSE forms). Not implemented yet. */
FNIEMOP_STUB(iemOp_pextrw_Gd_Nq_Ib__pextrw_Gd_Udq_Ib);

/** Opcode 0x0f 0xc6 - shufps/shufpd. Not implemented yet. */
FNIEMOP_STUB(iemOp_shufps_Vps_Wps_Ib__shufdp_Vpd_Wpd_Ib);
6656
6657
/**
 * Opcode 0x0f 0xc7 !11/1 - cmpxchg8b Mq.
 *
 * Compares EDX:EAX with the 64-bit memory operand; on match (ZF=1) stores
 * ECX:EBX to memory, otherwise loads the memory value into EDX:EAX.  The
 * register pairs are assembled into RTUINT64U unions (low dword = xAX/xBX,
 * high dword = xDX/xCX) before calling the assembly worker; LOCK selects
 * the locked variant.
 */
FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
{
    IEMOP_MNEMONIC("cmpxchg8b Mq");

    IEM_MC_BEGIN(4, 3);
    IEM_MC_ARG(uint64_t *, pu64MemDst, 0);
    IEM_MC_ARG(PRTUINT64U, pu64EaxEdx, 1);
    IEM_MC_ARG(PRTUINT64U, pu64EbxEcx, 2);
    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
    IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
    IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);

    IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
    IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
    IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);

    IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
    IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
    IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);

    IEM_MC_FETCH_EFLAGS(EFlags);
    if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
    else
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);

    IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
    IEM_MC_COMMIT_EFLAGS(EFlags);
    /* On mismatch the worker left the memory value in the union; write it back to EAX:EDX. */
    IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
        /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */
        IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
        IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
6702
6703
/** Opcode REX.W 0x0f 0xc7 !11/1 - cmpxchg16b Mdq. Not implemented; raises \#UD. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm);

/** Opcode 0x0f 0xc7 11/6 - rdrand Rv. Not implemented; raises \#UD. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm);

/** Opcode 0x0f 0xc7 !11/6 - vmptrld Mq. Not implemented; raises \#UD. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);

/** Opcode 0x66 0x0f 0xc7 !11/6 - vmclear Mq. Not implemented; raises \#UD. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xc7 !11/6 - vmxon Mq. Not implemented; raises \#UD. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);

/** Opcode [0xf3] 0x0f 0xc7 !11/7 - vmptrst Mq. Not implemented; raises \#UD. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
6721
6722
6723/** Opcode 0x0f 0xc7. */
6724FNIEMOP_DEF(iemOp_Grp9)
6725{
6726 /** @todo Testcase: Check mixing 0x66 and 0xf3. Check the effect of 0xf2. */
6727 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6728 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
6729 {
6730 case 0: case 2: case 3: case 4: case 5:
6731 return IEMOP_RAISE_INVALID_OPCODE();
6732 case 1:
6733 /** @todo Testcase: Check prefix effects on cmpxchg8b/16b. */
6734 if ( (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)
6735 || (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))) /** @todo Testcase: AMD seems to express a different idea here wrt prefixes. */
6736 return IEMOP_RAISE_INVALID_OPCODE();
6737 if (bRm & IEM_OP_PRF_SIZE_REX_W)
6738 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
6739 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
6740 case 6:
6741 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6742 return FNIEMOP_CALL_1(iemOp_Grp9_rdrand_Rv, bRm);
6743 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))
6744 {
6745 case 0:
6746 return FNIEMOP_CALL_1(iemOp_Grp9_vmptrld_Mq, bRm);
6747 case IEM_OP_PRF_SIZE_OP:
6748 return FNIEMOP_CALL_1(iemOp_Grp9_vmclear_Mq, bRm);
6749 case IEM_OP_PRF_REPZ:
6750 return FNIEMOP_CALL_1(iemOp_Grp9_vmxon_Mq, bRm);
6751 default:
6752 return IEMOP_RAISE_INVALID_OPCODE();
6753 }
6754 case 7:
6755 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))
6756 {
6757 case 0:
6758 case IEM_OP_PRF_REPZ:
6759 return FNIEMOP_CALL_1(iemOp_Grp9_vmptrst_Mq, bRm);
6760 default:
6761 return IEMOP_RAISE_INVALID_OPCODE();
6762 }
6763 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6764 }
6765}
6766
6767
6768/**
6769 * Common 'bswap register' helper.
6770 */
6771FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
6772{
6773 IEMOP_HLP_NO_LOCK_PREFIX();
6774 switch (pIemCpu->enmEffOpSize)
6775 {
6776 case IEMMODE_16BIT:
6777 IEM_MC_BEGIN(1, 0);
6778 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6779 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
6780 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
6781 IEM_MC_ADVANCE_RIP();
6782 IEM_MC_END();
6783 return VINF_SUCCESS;
6784
6785 case IEMMODE_32BIT:
6786 IEM_MC_BEGIN(1, 0);
6787 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6788 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
6789 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6790 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
6791 IEM_MC_ADVANCE_RIP();
6792 IEM_MC_END();
6793 return VINF_SUCCESS;
6794
6795 case IEMMODE_64BIT:
6796 IEM_MC_BEGIN(1, 0);
6797 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6798 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
6799 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
6800 IEM_MC_ADVANCE_RIP();
6801 IEM_MC_END();
6802 return VINF_SUCCESS;
6803
6804 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6805 }
6806}
6807
6808
/** Opcode 0x0f 0xc8 - bswap rAX/r8 (register selected by REX.B). */
FNIEMOP_DEF(iemOp_bswap_rAX_r8)
{
    IEMOP_MNEMONIC("bswap rAX/r8");
    /* Note! Intel manuals states that R8-R15 can be accessed by using a REX.X
             prefix.  REX.B is the correct prefix it appears.  For a parallel
             case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pIemCpu->uRexB);
}
6819
6820
/** Opcode 0x0f 0xc9 - bswap rCX/r9. */
FNIEMOP_DEF(iemOp_bswap_rCX_r9)
{
    IEMOP_MNEMONIC("bswap rCX/r9");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pIemCpu->uRexB);
}
6828
6829
6830/** Opcode 0x0f 0xca. */
6831FNIEMOP_DEF(iemOp_bswap_rDX_r10)
6832{
6833 IEMOP_MNEMONIC("bswap rDX/r9");
6834 IEMOP_HLP_MIN_486();
6835 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pIemCpu->uRexB);
6836}
6837
6838
6839/** Opcode 0x0f 0xcb. */
6840FNIEMOP_DEF(iemOp_bswap_rBX_r11)
6841{
6842 IEMOP_MNEMONIC("bswap rBX/r9");
6843 IEMOP_HLP_MIN_486();
6844 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pIemCpu->uRexB);
6845}
6846
6847
/** Opcode 0x0f 0xcc - bswap rSP/r12. */
FNIEMOP_DEF(iemOp_bswap_rSP_r12)
{
    IEMOP_MNEMONIC("bswap rSP/r12");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pIemCpu->uRexB);
}
6855
6856
/** Opcode 0x0f 0xcd - bswap rBP/r13. */
FNIEMOP_DEF(iemOp_bswap_rBP_r13)
{
    IEMOP_MNEMONIC("bswap rBP/r13");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pIemCpu->uRexB);
}
6864
6865
/** Opcode 0x0f 0xce - bswap rSI/r14. */
FNIEMOP_DEF(iemOp_bswap_rSI_r14)
{
    IEMOP_MNEMONIC("bswap rSI/r14");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pIemCpu->uRexB);
}
6873
6874
/** Opcode 0x0f 0xcf - bswap rDI/r15. */
FNIEMOP_DEF(iemOp_bswap_rDI_r15)
{
    IEMOP_MNEMONIC("bswap rDI/r15");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pIemCpu->uRexB);
}
6882
6883
6884
/** Opcode 0x0f 0xd0 - addsubpd/addsubps. Not implemented yet. */
FNIEMOP_STUB(iemOp_addsubpd_Vpd_Wpd__addsubps_Vps_Wps);
/** Opcode 0x0f 0xd1 - psrlw (MMX/SSE forms). Not implemented yet. */
FNIEMOP_STUB(iemOp_psrlw_Pp_Qp__psrlw_Vdp_Wdq);
/** Opcode 0x0f 0xd2 - psrld (MMX/SSE forms). Not implemented yet. */
FNIEMOP_STUB(iemOp_psrld_Pq_Qq__psrld_Vdq_Wdq);
/** Opcode 0x0f 0xd3 - psrlq (MMX/SSE forms). Not implemented yet. */
FNIEMOP_STUB(iemOp_psrlq_Pq_Qq__psrlq_Vdq_Wdq);
/** Opcode 0x0f 0xd4 - paddq (MMX/SSE forms). Not implemented yet. */
FNIEMOP_STUB(iemOp_paddq_Pq_Qq__paddq_Vdq_Wdq);
/** Opcode 0x0f 0xd5 - pmullw (MMX/SSE forms). Not implemented yet. */
FNIEMOP_STUB(iemOp_pmulq_Pq_Qq__pmullw_Vdq_Wdq);
/** Opcode 0x0f 0xd6 - movq/movq2dq/movdq2q (prefix-selected). Not implemented yet. */
FNIEMOP_STUB(iemOp_movq_Wq_Vq__movq2dq_Vdq_Nq__movdq2q_Pq_Uq);
6899
6900
6901/** Opcode 0x0f 0xd7. */
6902FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq__pmovmskb_Gd_Udq)
6903{
6904 /* Docs says register only. */
6905 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6906 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
6907 return IEMOP_RAISE_INVALID_OPCODE();
6908
6909 /* Note! Taking the lazy approch here wrt the high 32-bits of the GREG. */
6910 /** @todo testcase: Check that the instruction implicitly clears the high
6911 * bits in 64-bit mode. The REX.W is first necessary when VLMAX > 256
6912 * and opcode modifications are made to work with the whole width (not
6913 * just 128). */
6914 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
6915 {
6916 case IEM_OP_PRF_SIZE_OP: /* SSE */
6917 IEMOP_MNEMONIC("pmovmskb Gd,Nq");
6918 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
6919 IEM_MC_BEGIN(2, 0);
6920 IEM_MC_ARG(uint64_t *, pDst, 0);
6921 IEM_MC_ARG(uint128_t const *, pSrc, 1);
6922 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6923 IEM_MC_PREPARE_SSE_USAGE();
6924 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
6925 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
6926 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
6927 IEM_MC_ADVANCE_RIP();
6928 IEM_MC_END();
6929 return VINF_SUCCESS;
6930
6931 case 0: /* MMX */
6932 IEMOP_MNEMONIC("pmovmskb Gd,Udq");
6933 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
6934 IEM_MC_BEGIN(2, 0);
6935 IEM_MC_ARG(uint64_t *, pDst, 0);
6936 IEM_MC_ARG(uint64_t const *, pSrc, 1);
6937 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
6938 IEM_MC_PREPARE_FPU_USAGE();
6939 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
6940 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
6941 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
6942 IEM_MC_ADVANCE_RIP();
6943 IEM_MC_END();
6944 return VINF_SUCCESS;
6945
6946 default:
6947 return IEMOP_RAISE_INVALID_OPCODE();
6948 }
6949}
6950
6951
6952/** Opcode 0x0f 0xd8. */
6953FNIEMOP_STUB(iemOp_psubusb_Pq_Qq__psubusb_Vdq_Wdq);
6954/** Opcode 0x0f 0xd9. */
6955FNIEMOP_STUB(iemOp_psubusw_Pq_Qq__psubusw_Vdq_Wdq);
6956/** Opcode 0x0f 0xda. */
6957FNIEMOP_STUB(iemOp_pminub_Pq_Qq__pminub_Vdq_Wdq);
6958/** Opcode 0x0f 0xdb. */
6959FNIEMOP_STUB(iemOp_pand_Pq_Qq__pand_Vdq_Wdq);
6960/** Opcode 0x0f 0xdc. */
6961FNIEMOP_STUB(iemOp_paddusb_Pq_Qq__paddusb_Vdq_Wdq);
6962/** Opcode 0x0f 0xdd. */
6963FNIEMOP_STUB(iemOp_paddusw_Pq_Qq__paddusw_Vdq_Wdq);
6964/** Opcode 0x0f 0xde. */
6965FNIEMOP_STUB(iemOp_pmaxub_Pq_Qq__pamxub_Vdq_Wdq);
6966/** Opcode 0x0f 0xdf. */
6967FNIEMOP_STUB(iemOp_pandn_Pq_Qq__pandn_Vdq_Wdq);
6968/** Opcode 0x0f 0xe0. */
6969FNIEMOP_STUB(iemOp_pavgb_Pq_Qq__pavgb_Vdq_Wdq);
6970/** Opcode 0x0f 0xe1. */
6971FNIEMOP_STUB(iemOp_psraw_Pq_Qq__psraw_Vdq_Wdq);
6972/** Opcode 0x0f 0xe2. */
6973FNIEMOP_STUB(iemOp_psrad_Pq_Qq__psrad_Vdq_Wdq);
6974/** Opcode 0x0f 0xe3. */
6975FNIEMOP_STUB(iemOp_pavgw_Pq_Qq__pavgw_Vdq_Wdq);
6976/** Opcode 0x0f 0xe4. */
6977FNIEMOP_STUB(iemOp_pmulhuw_Pq_Qq__pmulhuw_Vdq_Wdq);
6978/** Opcode 0x0f 0xe5. */
6979FNIEMOP_STUB(iemOp_pmulhw_Pq_Qq__pmulhw_Vdq_Wdq);
6980/** Opcode 0x0f 0xe6. */
6981FNIEMOP_STUB(iemOp_cvttpd2dq_Vdq_Wdp__cvtdq2pd_Vdq_Wpd__cvtpd2dq_Vdq_Wpd);
6982
6983
/** Opcode 0x0f 0xe7. */
#if 1 //ndef VBOX_WITH_REM /** @todo figure out why some/all of these instructions is upsetting things */
FNIEMOP_DEF(iemOp_movntq_Mq_Pq__movntdq_Mdq_Vdq)
{
    /* MOVNTQ m64,mm (MMX) / MOVNTDQ m128,xmm (66h prefix, SSE2): store an MMX
       or XMM register to memory.  Only the memory-destination form is valid;
       the non-temporal hint itself is not modelled here. */
    IEMOP_MNEMONIC(!(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP) ? "movntq mr,r" : "movntdq mr,r");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, memory.
         */
/** @todo check when the REPNZ/Z bits kick in. Same as lock, probably... */
        switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
        {

            case IEM_OP_PRF_SIZE_OP: /* SSE */
                /* MOVNTDQ: 128-bit store, destination must be 16-byte aligned. */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint128_t, uSrc);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

                IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_MEM_U128_ALIGN_SSE(pIemCpu->iEffSeg, GCPtrEffSrc, uSrc);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case 0: /* MMX */
                /* MOVNTQ: 64-bit store from an MMX register (no REX extension). */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, uSrc);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();

                IEM_MC_FETCH_MREG_U64(uSrc, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffSrc, uSrc);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            default:
                /* F2h/F3h prefixed forms are undefined for this opcode. */
                return IEMOP_RAISE_INVALID_OPCODE();
        }
    }
    /* The register, register encoding is invalid. */
    else
        return IEMOP_RAISE_INVALID_OPCODE();
    return VINF_SUCCESS;
}
#else
FNIEMOP_STUB(iemOp_movntq_Mq_Pq__movntdq_Mdq_Vdq);
#endif
7045
7046
7047/** Opcode 0x0f 0xe8. */
7048FNIEMOP_STUB(iemOp_psubsb_Pq_Qq__psubsb_Vdq_Wdq);
7049/** Opcode 0x0f 0xe9. */
7050FNIEMOP_STUB(iemOp_psubsw_Pq_Qq__psubsw_Vdq_Wdq);
7051/** Opcode 0x0f 0xea. */
7052FNIEMOP_STUB(iemOp_pminsw_Pq_Qq__pminsw_Vdq_Wdq);
7053/** Opcode 0x0f 0xeb. */
7054FNIEMOP_STUB(iemOp_por_Pq_Qq__por_Vdq_Wdq);
7055/** Opcode 0x0f 0xec. */
7056FNIEMOP_STUB(iemOp_paddsb_Pq_Qq__paddsb_Vdq_Wdq);
7057/** Opcode 0x0f 0xed. */
7058FNIEMOP_STUB(iemOp_paddsw_Pq_Qq__paddsw_Vdq_Wdq);
7059/** Opcode 0x0f 0xee. */
7060FNIEMOP_STUB(iemOp_pmaxsw_Pq_Qq__pmaxsw_Vdq_Wdq);
7061
7062
/** Opcode 0x0f 0xef. */
FNIEMOP_DEF(iemOp_pxor_Pq_Qq__pxor_Vdq_Wdq)
{
    /* PXOR: common MMX/SSE2 full-width binary-op decoder with the XOR worker. */
    IEMOP_MNEMONIC("pxor");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pxor);
}
7069
7070
7071/** Opcode 0x0f 0xf0. */
7072FNIEMOP_STUB(iemOp_lddqu_Vdq_Mdq);
7073/** Opcode 0x0f 0xf1. */
7074FNIEMOP_STUB(iemOp_psllw_Pq_Qq__pslw_Vdq_Wdq);
7075/** Opcode 0x0f 0xf2. */
7076FNIEMOP_STUB(iemOp_psld_Pq_Qq__pslld_Vdq_Wdq);
7077/** Opcode 0x0f 0xf3. */
7078FNIEMOP_STUB(iemOp_psllq_Pq_Qq__pslq_Vdq_Wdq);
7079/** Opcode 0x0f 0xf4. */
7080FNIEMOP_STUB(iemOp_pmuludq_Pq_Qq__pmuludq_Vdq_Wdq);
7081/** Opcode 0x0f 0xf5. */
7082FNIEMOP_STUB(iemOp_pmaddwd_Pq_Qq__pmaddwd_Vdq_Wdq);
7083/** Opcode 0x0f 0xf6. */
7084FNIEMOP_STUB(iemOp_psadbw_Pq_Qq__psadbw_Vdq_Wdq);
7085/** Opcode 0x0f 0xf7. */
7086FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq__maskmovdqu_Vdq_Udq);
7087/** Opcode 0x0f 0xf8. */
7088FNIEMOP_STUB(iemOp_psubb_Pq_Qq_psubb_Vdq_Wdq); //NEXT
7089/** Opcode 0x0f 0xf9. */
7090FNIEMOP_STUB(iemOp_psubw_Pq_Qq__psubw_Vdq_Wdq);
7091/** Opcode 0x0f 0xfa. */
7092FNIEMOP_STUB(iemOp_psubd_Pq_Qq__psubd_Vdq_Wdq);
7093/** Opcode 0x0f 0xfb. */
7094FNIEMOP_STUB(iemOp_psubq_Pq_Qq__psbuq_Vdq_Wdq);
7095/** Opcode 0x0f 0xfc. */
7096FNIEMOP_STUB(iemOp_paddb_Pq_Qq__paddb_Vdq_Wdq);
7097/** Opcode 0x0f 0xfd. */
7098FNIEMOP_STUB(iemOp_paddw_Pq_Qq__paddw_Vdq_Wdq);
7099/** Opcode 0x0f 0xfe. */
7100FNIEMOP_STUB(iemOp_paddd_Pq_Qq__paddd_Vdq_Wdq);
7101
7102
/** Dispatch table for the two-byte (0x0f-prefixed) opcodes, indexed by the
 *  second opcode byte. */
const PFNIEMOP g_apfnTwoByteMap[256] =
{
    /* 0x00 */ iemOp_Grp6,
    /* 0x01 */ iemOp_Grp7,
    /* 0x02 */ iemOp_lar_Gv_Ew,
    /* 0x03 */ iemOp_lsl_Gv_Ew,
    /* 0x04 */ iemOp_Invalid,
    /* 0x05 */ iemOp_syscall,
    /* 0x06 */ iemOp_clts,
    /* 0x07 */ iemOp_sysret,
    /* 0x08 */ iemOp_invd,
    /* 0x09 */ iemOp_wbinvd,
    /* 0x0a */ iemOp_Invalid,
    /* 0x0b */ iemOp_ud2,
    /* 0x0c */ iemOp_Invalid,
    /* 0x0d */ iemOp_nop_Ev_GrpP,
    /* 0x0e */ iemOp_femms,
    /* 0x0f */ iemOp_3Dnow,
    /* 0x10 */ iemOp_movups_Vps_Wps__movupd_Vpd_Wpd__movss_Vss_Wss__movsd_Vsd_Wsd,
    /* 0x11 */ iemOp_movups_Wps_Vps__movupd_Wpd_Vpd__movss_Wss_Vss__movsd_Vsd_Wsd,
    /* 0x12 */ iemOp_movlps_Vq_Mq__movhlps_Vq_Uq__movlpd_Vq_Mq__movsldup_Vq_Wq__movddup_Vq_Wq,
    /* 0x13 */ iemOp_movlps_Mq_Vq__movlpd_Mq_Vq,
    /* 0x14 */ iemOp_unpckhlps_Vps_Wq__unpcklpd_Vpd_Wq, /* NOTE(review): identifier says 'unpckhlps' but 0x0f 0x14 is UNPCKLPS -- confirm and rename globally. */
    /* 0x15 */ iemOp_unpckhps_Vps_Wq__unpckhpd_Vpd_Wq,
    /* 0x16 */ iemOp_movhps_Vq_Mq__movlhps_Vq_Uq__movhpd_Vq_Mq__movshdup_Vq_Wq,
    /* 0x17 */ iemOp_movhps_Mq_Vq__movhpd_Mq_Vq,
    /* 0x18 */ iemOp_prefetch_Grp16,
    /* 0x19 */ iemOp_nop_Ev,
    /* 0x1a */ iemOp_nop_Ev,
    /* 0x1b */ iemOp_nop_Ev,
    /* 0x1c */ iemOp_nop_Ev,
    /* 0x1d */ iemOp_nop_Ev,
    /* 0x1e */ iemOp_nop_Ev,
    /* 0x1f */ iemOp_nop_Ev,
    /* 0x20 */ iemOp_mov_Rd_Cd,
    /* 0x21 */ iemOp_mov_Rd_Dd,
    /* 0x22 */ iemOp_mov_Cd_Rd,
    /* 0x23 */ iemOp_mov_Dd_Rd,
    /* 0x24 */ iemOp_mov_Rd_Td,
    /* 0x25 */ iemOp_Invalid,
    /* 0x26 */ iemOp_mov_Td_Rd,
    /* 0x27 */ iemOp_Invalid,
    /* 0x28 */ iemOp_movaps_Vps_Wps__movapd_Vpd_Wpd,
    /* 0x29 */ iemOp_movaps_Wps_Vps__movapd_Wpd_Vpd,
    /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi__cvtpi2pd_Vpd_Qpi__cvtsi2ss_Vss_Ey__cvtsi2sd_Vsd_Ey,
    /* 0x2b */ iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd,
    /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps__cvttpd2pi_Ppi_Wpd__cvttss2si_Gy_Wss__cvttsd2si_Yu_Wsd,
    /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps__cvtpd2pi_QpiWpd__cvtss2si_Gy_Wss__cvtsd2si_Gy_Wsd,
    /* 0x2e */ iemOp_ucomiss_Vss_Wss__ucomisd_Vsd_Wsd,
    /* 0x2f */ iemOp_comiss_Vss_Wss__comisd_Vsd_Wsd,
    /* 0x30 */ iemOp_wrmsr,
    /* 0x31 */ iemOp_rdtsc,
    /* 0x32 */ iemOp_rdmsr,
    /* 0x33 */ iemOp_rdpmc,
    /* 0x34 */ iemOp_sysenter,
    /* 0x35 */ iemOp_sysexit,
    /* 0x36 */ iemOp_Invalid,
    /* 0x37 */ iemOp_getsec,
    /* 0x38 */ iemOp_3byte_Esc_A4,
    /* 0x39 */ iemOp_Invalid,
    /* 0x3a */ iemOp_3byte_Esc_A5,
    /* 0x3b */ iemOp_Invalid,
    /* 0x3c */ iemOp_Invalid,
    /* 0x3d */ iemOp_Invalid,
    /* 0x3e */ iemOp_Invalid,
    /* 0x3f */ iemOp_Invalid,
    /* 0x40 */ iemOp_cmovo_Gv_Ev,
    /* 0x41 */ iemOp_cmovno_Gv_Ev,
    /* 0x42 */ iemOp_cmovc_Gv_Ev,
    /* 0x43 */ iemOp_cmovnc_Gv_Ev,
    /* 0x44 */ iemOp_cmove_Gv_Ev,
    /* 0x45 */ iemOp_cmovne_Gv_Ev,
    /* 0x46 */ iemOp_cmovbe_Gv_Ev,
    /* 0x47 */ iemOp_cmovnbe_Gv_Ev,
    /* 0x48 */ iemOp_cmovs_Gv_Ev,
    /* 0x49 */ iemOp_cmovns_Gv_Ev,
    /* 0x4a */ iemOp_cmovp_Gv_Ev,
    /* 0x4b */ iemOp_cmovnp_Gv_Ev,
    /* 0x4c */ iemOp_cmovl_Gv_Ev,
    /* 0x4d */ iemOp_cmovnl_Gv_Ev,
    /* 0x4e */ iemOp_cmovle_Gv_Ev,
    /* 0x4f */ iemOp_cmovnle_Gv_Ev,
    /* 0x50 */ iemOp_movmskps_Gy_Ups__movmskpd_Gy_Upd,
    /* 0x51 */ iemOp_sqrtps_Wps_Vps__sqrtpd_Wpd_Vpd__sqrtss_Vss_Wss__sqrtsd_Vsd_Wsd,
    /* 0x52 */ iemOp_rsqrtps_Wps_Vps__rsqrtss_Vss_Wss,
    /* 0x53 */ iemOp_rcpps_Wps_Vps__rcpss_Vs_Wss,
    /* 0x54 */ iemOp_andps_Vps_Wps__andpd_Wpd_Vpd,
    /* 0x55 */ iemOp_andnps_Vps_Wps__andnpd_Wpd_Vpd,
    /* 0x56 */ iemOp_orps_Wpd_Vpd__orpd_Wpd_Vpd,
    /* 0x57 */ iemOp_xorps_Vps_Wps__xorpd_Wpd_Vpd,
    /* 0x58 */ iemOp_addps_Vps_Wps__addpd_Vpd_Wpd__addss_Vss_Wss__addsd_Vsd_Wsd,
    /* 0x59 */ iemOp_mulps_Vps_Wps__mulpd_Vpd_Wpd__mulss_Vss__Wss__mulsd_Vsd_Wsd,
    /* 0x5a */ iemOp_cvtps2pd_Vpd_Wps__cvtpd2ps_Vps_Wpd__cvtss2sd_Vsd_Wss__cvtsd2ss_Vss_Wsd,
    /* 0x5b */ iemOp_cvtdq2ps_Vps_Wdq__cvtps2dq_Vdq_Wps__cvtps2dq_Vdq_Wps,
    /* 0x5c */ iemOp_subps_Vps_Wps__subpd_Vps_Wdp__subss_Vss_Wss__subsd_Vsd_Wsd,
    /* 0x5d */ iemOp_minps_Vps_Wps__minpd_Vpd_Wpd__minss_Vss_Wss__minsd_Vsd_Wsd,
    /* 0x5e */ iemOp_divps_Vps_Wps__divpd_Vpd_Wpd__divss_Vss_Wss__divsd_Vsd_Wsd,
    /* 0x5f */ iemOp_maxps_Vps_Wps__maxpd_Vpd_Wpd__maxss_Vss_Wss__maxsd_Vsd_Wsd,
    /* 0x60 */ iemOp_punpcklbw_Pq_Qd__punpcklbw_Vdq_Wdq,
    /* 0x61 */ iemOp_punpcklwd_Pq_Qd__punpcklwd_Vdq_Wdq,
    /* 0x62 */ iemOp_punpckldq_Pq_Qd__punpckldq_Vdq_Wdq,
    /* 0x63 */ iemOp_packsswb_Pq_Qq__packsswb_Vdq_Wdq,
    /* 0x64 */ iemOp_pcmpgtb_Pq_Qq__pcmpgtb_Vdq_Wdq,
    /* 0x65 */ iemOp_pcmpgtw_Pq_Qq__pcmpgtw_Vdq_Wdq,
    /* 0x66 */ iemOp_pcmpgtd_Pq_Qq__pcmpgtd_Vdq_Wdq,
    /* 0x67 */ iemOp_packuswb_Pq_Qq__packuswb_Vdq_Wdq,
    /* 0x68 */ iemOp_punpckhbw_Pq_Qq__punpckhbw_Vdq_Wdq,
    /* 0x69 */ iemOp_punpckhwd_Pq_Qd__punpckhwd_Vdq_Wdq,
    /* 0x6a */ iemOp_punpckhdq_Pq_Qd__punpckhdq_Vdq_Wdq,
    /* 0x6b */ iemOp_packssdw_Pq_Qd__packssdq_Vdq_Wdq,
    /* 0x6c */ iemOp_punpcklqdq_Vdq_Wdq,
    /* 0x6d */ iemOp_punpckhqdq_Vdq_Wdq,
    /* 0x6e */ iemOp_movd_q_Pd_Ey__movd_q_Vy_Ey,
    /* 0x6f */ iemOp_movq_Pq_Qq__movdqa_Vdq_Wdq__movdqu_Vdq_Wdq,
    /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib__pshufd_Vdq_Wdq_Ib__pshufhw_Vdq_Wdq_Ib__pshuflq_Vdq_Wdq_Ib,
    /* 0x71 */ iemOp_Grp12,
    /* 0x72 */ iemOp_Grp13,
    /* 0x73 */ iemOp_Grp14,
    /* 0x74 */ iemOp_pcmpeqb_Pq_Qq__pcmpeqb_Vdq_Wdq,
    /* 0x75 */ iemOp_pcmpeqw_Pq_Qq__pcmpeqw_Vdq_Wdq,
    /* 0x76 */ iemOp_pcmped_Pq_Qq__pcmpeqd_Vdq_Wdq,
    /* 0x77 */ iemOp_emms,
    /* 0x78 */ iemOp_vmread_AmdGrp17,
    /* 0x79 */ iemOp_vmwrite,
    /* 0x7a */ iemOp_Invalid,
    /* 0x7b */ iemOp_Invalid,
    /* 0x7c */ iemOp_haddpd_Vdp_Wpd__haddps_Vps_Wps,
    /* 0x7d */ iemOp_hsubpd_Vpd_Wpd__hsubps_Vps_Wps,
    /* 0x7e */ iemOp_movd_q_Ey_Pd__movd_q_Ey_Vy__movq_Vq_Wq,
    /* 0x7f */ iemOp_movq_Qq_Pq__movq_movdqa_Wdq_Vdq__movdqu_Wdq_Vdq,
    /* 0x80 */ iemOp_jo_Jv,
    /* 0x81 */ iemOp_jno_Jv,
    /* 0x82 */ iemOp_jc_Jv,
    /* 0x83 */ iemOp_jnc_Jv,
    /* 0x84 */ iemOp_je_Jv,
    /* 0x85 */ iemOp_jne_Jv,
    /* 0x86 */ iemOp_jbe_Jv,
    /* 0x87 */ iemOp_jnbe_Jv,
    /* 0x88 */ iemOp_js_Jv,
    /* 0x89 */ iemOp_jns_Jv,
    /* 0x8a */ iemOp_jp_Jv,
    /* 0x8b */ iemOp_jnp_Jv,
    /* 0x8c */ iemOp_jl_Jv,
    /* 0x8d */ iemOp_jnl_Jv,
    /* 0x8e */ iemOp_jle_Jv,
    /* 0x8f */ iemOp_jnle_Jv,
    /* 0x90 */ iemOp_seto_Eb,
    /* 0x91 */ iemOp_setno_Eb,
    /* 0x92 */ iemOp_setc_Eb,
    /* 0x93 */ iemOp_setnc_Eb,
    /* 0x94 */ iemOp_sete_Eb,
    /* 0x95 */ iemOp_setne_Eb,
    /* 0x96 */ iemOp_setbe_Eb,
    /* 0x97 */ iemOp_setnbe_Eb,
    /* 0x98 */ iemOp_sets_Eb,
    /* 0x99 */ iemOp_setns_Eb,
    /* 0x9a */ iemOp_setp_Eb,
    /* 0x9b */ iemOp_setnp_Eb,
    /* 0x9c */ iemOp_setl_Eb,
    /* 0x9d */ iemOp_setnl_Eb,
    /* 0x9e */ iemOp_setle_Eb,
    /* 0x9f */ iemOp_setnle_Eb,
    /* 0xa0 */ iemOp_push_fs,
    /* 0xa1 */ iemOp_pop_fs,
    /* 0xa2 */ iemOp_cpuid,
    /* 0xa3 */ iemOp_bt_Ev_Gv,
    /* 0xa4 */ iemOp_shld_Ev_Gv_Ib,
    /* 0xa5 */ iemOp_shld_Ev_Gv_CL,
    /* 0xa6 */ iemOp_Invalid,
    /* 0xa7 */ iemOp_Invalid,
    /* 0xa8 */ iemOp_push_gs,
    /* 0xa9 */ iemOp_pop_gs,
    /* 0xaa */ iemOp_rsm,
    /* 0xab */ iemOp_bts_Ev_Gv,
    /* 0xac */ iemOp_shrd_Ev_Gv_Ib,
    /* 0xad */ iemOp_shrd_Ev_Gv_CL,
    /* 0xae */ iemOp_Grp15,
    /* 0xaf */ iemOp_imul_Gv_Ev,
    /* 0xb0 */ iemOp_cmpxchg_Eb_Gb,
    /* 0xb1 */ iemOp_cmpxchg_Ev_Gv,
    /* 0xb2 */ iemOp_lss_Gv_Mp,
    /* 0xb3 */ iemOp_btr_Ev_Gv,
    /* 0xb4 */ iemOp_lfs_Gv_Mp,
    /* 0xb5 */ iemOp_lgs_Gv_Mp,
    /* 0xb6 */ iemOp_movzx_Gv_Eb,
    /* 0xb7 */ iemOp_movzx_Gv_Ew,
    /* 0xb8 */ iemOp_popcnt_Gv_Ev_jmpe,
    /* 0xb9 */ iemOp_Grp10,
    /* 0xba */ iemOp_Grp8,
    /* 0xbb */ iemOp_btc_Ev_Gv, /* was mislabelled 0xbd; this is the 0xbb (BTC) slot */
    /* 0xbc */ iemOp_bsf_Gv_Ev,
    /* 0xbd */ iemOp_bsr_Gv_Ev,
    /* 0xbe */ iemOp_movsx_Gv_Eb,
    /* 0xbf */ iemOp_movsx_Gv_Ew,
    /* 0xc0 */ iemOp_xadd_Eb_Gb,
    /* 0xc1 */ iemOp_xadd_Ev_Gv,
    /* 0xc2 */ iemOp_cmpps_Vps_Wps_Ib__cmppd_Vpd_Wpd_Ib__cmpss_Vss_Wss_Ib__cmpsd_Vsd_Wsd_Ib,
    /* 0xc3 */ iemOp_movnti_My_Gy,
    /* 0xc4 */ iemOp_pinsrw_Pq_Ry_Mw_Ib__pinsrw_Vdq_Ry_Mw_Ib,
    /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib__pextrw_Gd_Udq_Ib,
    /* 0xc6 */ iemOp_shufps_Vps_Wps_Ib__shufdp_Vpd_Wpd_Ib,
    /* 0xc7 */ iemOp_Grp9,
    /* 0xc8 */ iemOp_bswap_rAX_r8,
    /* 0xc9 */ iemOp_bswap_rCX_r9,
    /* 0xca */ iemOp_bswap_rDX_r10,
    /* 0xcb */ iemOp_bswap_rBX_r11,
    /* 0xcc */ iemOp_bswap_rSP_r12,
    /* 0xcd */ iemOp_bswap_rBP_r13,
    /* 0xce */ iemOp_bswap_rSI_r14,
    /* 0xcf */ iemOp_bswap_rDI_r15,
    /* 0xd0 */ iemOp_addsubpd_Vpd_Wpd__addsubps_Vps_Wps,
    /* 0xd1 */ iemOp_psrlw_Pp_Qp__psrlw_Vdp_Wdq,
    /* 0xd2 */ iemOp_psrld_Pq_Qq__psrld_Vdq_Wdq,
    /* 0xd3 */ iemOp_psrlq_Pq_Qq__psrlq_Vdq_Wdq,
    /* 0xd4 */ iemOp_paddq_Pq_Qq__paddq_Vdq_Wdq,
    /* 0xd5 */ iemOp_pmulq_Pq_Qq__pmullw_Vdq_Wdq,
    /* 0xd6 */ iemOp_movq_Wq_Vq__movq2dq_Vdq_Nq__movdq2q_Pq_Uq,
    /* 0xd7 */ iemOp_pmovmskb_Gd_Nq__pmovmskb_Gd_Udq,
    /* 0xd8 */ iemOp_psubusb_Pq_Qq__psubusb_Vdq_Wdq,
    /* 0xd9 */ iemOp_psubusw_Pq_Qq__psubusw_Vdq_Wdq,
    /* 0xda */ iemOp_pminub_Pq_Qq__pminub_Vdq_Wdq,
    /* 0xdb */ iemOp_pand_Pq_Qq__pand_Vdq_Wdq,
    /* 0xdc */ iemOp_paddusb_Pq_Qq__paddusb_Vdq_Wdq,
    /* 0xdd */ iemOp_paddusw_Pq_Qq__paddusw_Vdq_Wdq,
    /* 0xde */ iemOp_pmaxub_Pq_Qq__pamxub_Vdq_Wdq,
    /* 0xdf */ iemOp_pandn_Pq_Qq__pandn_Vdq_Wdq,
    /* 0xe0 */ iemOp_pavgb_Pq_Qq__pavgb_Vdq_Wdq,
    /* 0xe1 */ iemOp_psraw_Pq_Qq__psraw_Vdq_Wdq,
    /* 0xe2 */ iemOp_psrad_Pq_Qq__psrad_Vdq_Wdq,
    /* 0xe3 */ iemOp_pavgw_Pq_Qq__pavgw_Vdq_Wdq,
    /* 0xe4 */ iemOp_pmulhuw_Pq_Qq__pmulhuw_Vdq_Wdq,
    /* 0xe5 */ iemOp_pmulhw_Pq_Qq__pmulhw_Vdq_Wdq,
    /* 0xe6 */ iemOp_cvttpd2dq_Vdq_Wdp__cvtdq2pd_Vdq_Wpd__cvtpd2dq_Vdq_Wpd,
    /* 0xe7 */ iemOp_movntq_Mq_Pq__movntdq_Mdq_Vdq,
    /* 0xe8 */ iemOp_psubsb_Pq_Qq__psubsb_Vdq_Wdq,
    /* 0xe9 */ iemOp_psubsw_Pq_Qq__psubsw_Vdq_Wdq,
    /* 0xea */ iemOp_pminsw_Pq_Qq__pminsw_Vdq_Wdq,
    /* 0xeb */ iemOp_por_Pq_Qq__por_Vdq_Wdq,
    /* 0xec */ iemOp_paddsb_Pq_Qq__paddsb_Vdq_Wdq,
    /* 0xed */ iemOp_paddsw_Pq_Qq__paddsw_Vdq_Wdq,
    /* 0xee */ iemOp_pmaxsw_Pq_Qq__pmaxsw_Vdq_Wdq,
    /* 0xef */ iemOp_pxor_Pq_Qq__pxor_Vdq_Wdq,
    /* 0xf0 */ iemOp_lddqu_Vdq_Mdq,
    /* 0xf1 */ iemOp_psllw_Pq_Qq__pslw_Vdq_Wdq,
    /* 0xf2 */ iemOp_psld_Pq_Qq__pslld_Vdq_Wdq,
    /* 0xf3 */ iemOp_psllq_Pq_Qq__pslq_Vdq_Wdq,
    /* 0xf4 */ iemOp_pmuludq_Pq_Qq__pmuludq_Vdq_Wdq,
    /* 0xf5 */ iemOp_pmaddwd_Pq_Qq__pmaddwd_Vdq_Wdq,
    /* 0xf6 */ iemOp_psadbw_Pq_Qq__psadbw_Vdq_Wdq,
    /* 0xf7 */ iemOp_maskmovq_Pq_Nq__maskmovdqu_Vdq_Udq,
    /* 0xf8 */ iemOp_psubb_Pq_Qq_psubb_Vdq_Wdq,
    /* 0xf9 */ iemOp_psubw_Pq_Qq__psubw_Vdq_Wdq,
    /* 0xfa */ iemOp_psubd_Pq_Qq__psubd_Vdq_Wdq,
    /* 0xfb */ iemOp_psubq_Pq_Qq__psbuq_Vdq_Wdq,
    /* 0xfc */ iemOp_paddb_Pq_Qq__paddb_Vdq_Wdq,
    /* 0xfd */ iemOp_paddw_Pq_Qq__paddw_Vdq_Wdq,
    /* 0xfe */ iemOp_paddd_Pq_Qq__paddd_Vdq_Wdq,
    /* 0xff */ iemOp_Invalid
};
7362
7363/** @} */
7364
7365
7366/** @name One byte opcodes.
7367 *
7368 * @{
7369 */
7370
/** Opcode 0x00. */
FNIEMOP_DEF(iemOp_add_Eb_Gb)
{
    /* ADD r/m8,r8 -- byte add with the mod r/m operand as destination. */
    IEMOP_MNEMONIC("add Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_add);
}
7377
7378
/** Opcode 0x01. */
FNIEMOP_DEF(iemOp_add_Ev_Gv)
{
    /* ADD r/m16/32/64,r16/32/64 -- word-sized add, mod r/m destination. */
    IEMOP_MNEMONIC("add Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_add);
}
7385
7386
/** Opcode 0x02. */
FNIEMOP_DEF(iemOp_add_Gb_Eb)
{
    /* ADD r8,r/m8 -- byte add with the register operand as destination. */
    IEMOP_MNEMONIC("add Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_add);
}
7393
7394
/** Opcode 0x03. */
FNIEMOP_DEF(iemOp_add_Gv_Ev)
{
    /* ADD r16/32/64,r/m16/32/64 -- word-sized add, register destination. */
    IEMOP_MNEMONIC("add Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_add);
}
7401
7402
/** Opcode 0x04. */
FNIEMOP_DEF(iemOp_add_Al_Ib)
{
    /* ADD AL,imm8. */
    IEMOP_MNEMONIC("add al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_add);
}
7409
7410
/** Opcode 0x05. */
FNIEMOP_DEF(iemOp_add_eAX_Iz)
{
    /* ADD rAX,imm16/32 (imm32 sign-extended in 64-bit operand size). */
    IEMOP_MNEMONIC("add rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_add);
}
7417
7418
/** Opcode 0x06. */
FNIEMOP_DEF(iemOp_push_ES)
{
    /* PUSH ES -- via the common segment-register push helper. */
    IEMOP_MNEMONIC("push es");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_ES);
}
7425
7426
/** Opcode 0x07. */
FNIEMOP_DEF(iemOp_pop_ES)
{
    /* POP ES -- invalid in 64-bit mode; lock prefix not allowed. */
    IEMOP_MNEMONIC("pop es");
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_ES, pIemCpu->enmEffOpSize);
}
7435
7436
7437/** Opcode 0x08. */
7438FNIEMOP_DEF(iemOp_or_Eb_Gb)
7439{
7440 IEMOP_MNEMONIC("or Eb,Gb");
7441 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7442 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_or);
7443}
7444
7445
7446/** Opcode 0x09. */
7447FNIEMOP_DEF(iemOp_or_Ev_Gv)
7448{
7449 IEMOP_MNEMONIC("or Ev,Gv ");
7450 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7451 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_or);
7452}
7453
7454
7455/** Opcode 0x0a. */
7456FNIEMOP_DEF(iemOp_or_Gb_Eb)
7457{
7458 IEMOP_MNEMONIC("or Gb,Eb");
7459 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7460 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_or);
7461}
7462
7463
7464/** Opcode 0x0b. */
7465FNIEMOP_DEF(iemOp_or_Gv_Ev)
7466{
7467 IEMOP_MNEMONIC("or Gv,Ev");
7468 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7469 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_or);
7470}
7471
7472
7473/** Opcode 0x0c. */
7474FNIEMOP_DEF(iemOp_or_Al_Ib)
7475{
7476 IEMOP_MNEMONIC("or al,Ib");
7477 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7478 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_or);
7479}
7480
7481
7482/** Opcode 0x0d. */
7483FNIEMOP_DEF(iemOp_or_eAX_Iz)
7484{
7485 IEMOP_MNEMONIC("or rAX,Iz");
7486 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7487 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_or);
7488}
7489
7490
/** Opcode 0x0e. */
FNIEMOP_DEF(iemOp_push_CS)
{
    /* PUSH CS -- via the common segment-register push helper. */
    IEMOP_MNEMONIC("push cs");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_CS);
}
7497
7498
/** Opcode 0x0f. */
FNIEMOP_DEF(iemOp_2byteEscape)
{
    /* Two-byte opcode escape: fetch the second opcode byte and dispatch
       through g_apfnTwoByteMap.  Requires at least a 286. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    /** @todo PUSH CS on 8086, undefined on 80186. */
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL(g_apfnTwoByteMap[b]);
}
7507
7508/** Opcode 0x10. */
7509FNIEMOP_DEF(iemOp_adc_Eb_Gb)
7510{
7511 IEMOP_MNEMONIC("adc Eb,Gb");
7512 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_adc);
7513}
7514
7515
7516/** Opcode 0x11. */
7517FNIEMOP_DEF(iemOp_adc_Ev_Gv)
7518{
7519 IEMOP_MNEMONIC("adc Ev,Gv");
7520 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_adc);
7521}
7522
7523
7524/** Opcode 0x12. */
7525FNIEMOP_DEF(iemOp_adc_Gb_Eb)
7526{
7527 IEMOP_MNEMONIC("adc Gb,Eb");
7528 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_adc);
7529}
7530
7531
7532/** Opcode 0x13. */
7533FNIEMOP_DEF(iemOp_adc_Gv_Ev)
7534{
7535 IEMOP_MNEMONIC("adc Gv,Ev");
7536 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_adc);
7537}
7538
7539
7540/** Opcode 0x14. */
7541FNIEMOP_DEF(iemOp_adc_Al_Ib)
7542{
7543 IEMOP_MNEMONIC("adc al,Ib");
7544 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_adc);
7545}
7546
7547
7548/** Opcode 0x15. */
7549FNIEMOP_DEF(iemOp_adc_eAX_Iz)
7550{
7551 IEMOP_MNEMONIC("adc rAX,Iz");
7552 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_adc);
7553}
7554
7555
7556/** Opcode 0x16. */
7557FNIEMOP_DEF(iemOp_push_SS)
7558{
7559 IEMOP_MNEMONIC("push ss");
7560 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_SS);
7561}
7562
7563
/** Opcode 0x17. */
FNIEMOP_DEF(iemOp_pop_SS)
{
    /* POP SS -- invalid in 64-bit mode; lock prefix not allowed. */
    IEMOP_MNEMONIC("pop ss"); /** @todo implies instruction fusing? */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_SS, pIemCpu->enmEffOpSize);
}
7572
7573
7574/** Opcode 0x18. */
7575FNIEMOP_DEF(iemOp_sbb_Eb_Gb)
7576{
7577 IEMOP_MNEMONIC("sbb Eb,Gb");
7578 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sbb);
7579}
7580
7581
7582/** Opcode 0x19. */
7583FNIEMOP_DEF(iemOp_sbb_Ev_Gv)
7584{
7585 IEMOP_MNEMONIC("sbb Ev,Gv");
7586 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sbb);
7587}
7588
7589
7590/** Opcode 0x1a. */
7591FNIEMOP_DEF(iemOp_sbb_Gb_Eb)
7592{
7593 IEMOP_MNEMONIC("sbb Gb,Eb");
7594 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sbb);
7595}
7596
7597
7598/** Opcode 0x1b. */
7599FNIEMOP_DEF(iemOp_sbb_Gv_Ev)
7600{
7601 IEMOP_MNEMONIC("sbb Gv,Ev");
7602 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sbb);
7603}
7604
7605
7606/** Opcode 0x1c. */
7607FNIEMOP_DEF(iemOp_sbb_Al_Ib)
7608{
7609 IEMOP_MNEMONIC("sbb al,Ib");
7610 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sbb);
7611}
7612
7613
7614/** Opcode 0x1d. */
7615FNIEMOP_DEF(iemOp_sbb_eAX_Iz)
7616{
7617 IEMOP_MNEMONIC("sbb rAX,Iz");
7618 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sbb);
7619}
7620
7621
7622/** Opcode 0x1e. */
7623FNIEMOP_DEF(iemOp_push_DS)
7624{
7625 IEMOP_MNEMONIC("push ds");
7626 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_DS);
7627}
7628
7629
/** Opcode 0x1f. */
FNIEMOP_DEF(iemOp_pop_DS)
{
    /* POP DS -- invalid in 64-bit mode; lock prefix not allowed. */
    IEMOP_MNEMONIC("pop ds");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_DS, pIemCpu->enmEffOpSize);
}
7638
7639
7640/** Opcode 0x20. */
7641FNIEMOP_DEF(iemOp_and_Eb_Gb)
7642{
7643 IEMOP_MNEMONIC("and Eb,Gb");
7644 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7645 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_and);
7646}
7647
7648
7649/** Opcode 0x21. */
7650FNIEMOP_DEF(iemOp_and_Ev_Gv)
7651{
7652 IEMOP_MNEMONIC("and Ev,Gv");
7653 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7654 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_and);
7655}
7656
7657
7658/** Opcode 0x22. */
7659FNIEMOP_DEF(iemOp_and_Gb_Eb)
7660{
7661 IEMOP_MNEMONIC("and Gb,Eb");
7662 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7663 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_and);
7664}
7665
7666
7667/** Opcode 0x23. */
7668FNIEMOP_DEF(iemOp_and_Gv_Ev)
7669{
7670 IEMOP_MNEMONIC("and Gv,Ev");
7671 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7672 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_and);
7673}
7674
7675
7676/** Opcode 0x24. */
7677FNIEMOP_DEF(iemOp_and_Al_Ib)
7678{
7679 IEMOP_MNEMONIC("and al,Ib");
7680 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7681 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_and);
7682}
7683
7684
7685/** Opcode 0x25. */
7686FNIEMOP_DEF(iemOp_and_eAX_Iz)
7687{
7688 IEMOP_MNEMONIC("and rAX,Iz");
7689 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7690 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_and);
7691}
7692
7693
/** Opcode 0x26. */
FNIEMOP_DEF(iemOp_seg_ES)
{
    /* ES segment-override prefix: record it, then decode the next opcode
       byte (any earlier REX prefix is cleared by the helper). */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg es");
    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_ES;
    pIemCpu->iEffSeg = X86_SREG_ES;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
7704
7705
/** Opcode 0x27. */
FNIEMOP_DEF(iemOp_daa)
{
    /* DAA -- decimal adjust AL after addition.  Invalid in 64-bit mode;
       OF is architecturally undefined afterwards. */
    IEMOP_MNEMONIC("daa AL");
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_daa);
}
7715
7716
7717/** Opcode 0x28. */
7718FNIEMOP_DEF(iemOp_sub_Eb_Gb)
7719{
7720 IEMOP_MNEMONIC("sub Eb,Gb");
7721 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sub);
7722}
7723
7724
7725/** Opcode 0x29. */
7726FNIEMOP_DEF(iemOp_sub_Ev_Gv)
7727{
7728 IEMOP_MNEMONIC("sub Ev,Gv");
7729 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sub);
7730}
7731
7732
7733/** Opcode 0x2a. */
7734FNIEMOP_DEF(iemOp_sub_Gb_Eb)
7735{
7736 IEMOP_MNEMONIC("sub Gb,Eb");
7737 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sub);
7738}
7739
7740
7741/** Opcode 0x2b. */
7742FNIEMOP_DEF(iemOp_sub_Gv_Ev)
7743{
7744 IEMOP_MNEMONIC("sub Gv,Ev");
7745 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sub);
7746}
7747
7748
7749/** Opcode 0x2c. */
7750FNIEMOP_DEF(iemOp_sub_Al_Ib)
7751{
7752 IEMOP_MNEMONIC("sub al,Ib");
7753 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sub);
7754}
7755
7756
7757/** Opcode 0x2d. */
7758FNIEMOP_DEF(iemOp_sub_eAX_Iz)
7759{
7760 IEMOP_MNEMONIC("sub rAX,Iz");
7761 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sub);
7762}
7763
7764
/** Opcode 0x2e. */
FNIEMOP_DEF(iemOp_seg_CS)
{
    /* CS segment-override prefix: record it, then decode the next opcode
       byte (any earlier REX prefix is cleared by the helper). */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg cs");
    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_CS;
    pIemCpu->iEffSeg = X86_SREG_CS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
7775
7776
/** Opcode 0x2f. */
FNIEMOP_DEF(iemOp_das)
{
    /* DAS -- decimal adjust AL after subtraction.  Invalid in 64-bit mode;
       OF is architecturally undefined afterwards. */
    IEMOP_MNEMONIC("das AL");
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_das);
}
7786
7787
7788/** Opcode 0x30. */
7789FNIEMOP_DEF(iemOp_xor_Eb_Gb)
7790{
7791 IEMOP_MNEMONIC("xor Eb,Gb");
7792 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7793 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_xor);
7794}
7795
7796
7797/** Opcode 0x31. */
7798FNIEMOP_DEF(iemOp_xor_Ev_Gv)
7799{
7800 IEMOP_MNEMONIC("xor Ev,Gv");
7801 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7802 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_xor);
7803}
7804
7805
7806/** Opcode 0x32. */
7807FNIEMOP_DEF(iemOp_xor_Gb_Eb)
7808{
7809 IEMOP_MNEMONIC("xor Gb,Eb");
7810 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7811 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_xor);
7812}
7813
7814
7815/** Opcode 0x33. */
7816FNIEMOP_DEF(iemOp_xor_Gv_Ev)
7817{
7818 IEMOP_MNEMONIC("xor Gv,Ev");
7819 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7820 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_xor);
7821}
7822
7823
7824/** Opcode 0x34. */
7825FNIEMOP_DEF(iemOp_xor_Al_Ib)
7826{
7827 IEMOP_MNEMONIC("xor al,Ib");
7828 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7829 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_xor);
7830}
7831
7832
7833/** Opcode 0x35. */
7834FNIEMOP_DEF(iemOp_xor_eAX_Iz)
7835{
7836 IEMOP_MNEMONIC("xor rAX,Iz");
7837 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7838 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_xor);
7839}
7840
7841
/** Opcode 0x36. */
FNIEMOP_DEF(iemOp_seg_SS)
{
    /* SS segment-override prefix: record it, then decode the next opcode
       byte (any earlier REX prefix is cleared by the helper). */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ss");
    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_SS;
    pIemCpu->iEffSeg = X86_SREG_SS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
7852
7853
7854/** Opcode 0x37. */
7855FNIEMOP_STUB(iemOp_aaa);
7856
7857
7858/** Opcode 0x38. */
7859FNIEMOP_DEF(iemOp_cmp_Eb_Gb)
7860{
7861 IEMOP_MNEMONIC("cmp Eb,Gb");
7862 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo do we have to decode the whole instruction first? */
7863 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_cmp);
7864}
7865
7866
7867/** Opcode 0x39. */
7868FNIEMOP_DEF(iemOp_cmp_Ev_Gv)
7869{
7870 IEMOP_MNEMONIC("cmp Ev,Gv");
7871 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo do we have to decode the whole instruction first? */
7872 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_cmp);
7873}
7874
7875
7876/** Opcode 0x3a. */
7877FNIEMOP_DEF(iemOp_cmp_Gb_Eb)
7878{
7879 IEMOP_MNEMONIC("cmp Gb,Eb");
7880 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_cmp);
7881}
7882
7883
7884/** Opcode 0x3b. */
7885FNIEMOP_DEF(iemOp_cmp_Gv_Ev)
7886{
7887 IEMOP_MNEMONIC("cmp Gv,Ev");
7888 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_cmp);
7889}
7890
7891
7892/** Opcode 0x3c. */
7893FNIEMOP_DEF(iemOp_cmp_Al_Ib)
7894{
7895 IEMOP_MNEMONIC("cmp al,Ib");
7896 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_cmp);
7897}
7898
7899
7900/** Opcode 0x3d. */
7901FNIEMOP_DEF(iemOp_cmp_eAX_Iz)
7902{
7903 IEMOP_MNEMONIC("cmp rAX,Iz");
7904 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_cmp);
7905}
7906
7907
/** Opcode 0x3e. */
FNIEMOP_DEF(iemOp_seg_DS)
{
    /* DS segment-override prefix: record it, then decode the next opcode
       byte (any earlier REX prefix is cleared by the helper). */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ds");
    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_DS;
    pIemCpu->iEffSeg = X86_SREG_DS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
7918
7919
7920/** Opcode 0x3f. */
7921FNIEMOP_STUB(iemOp_aas);
7922
/**
 * Common 'inc/dec/not/neg register' helper.
 *
 * @param   pImpl   Size-variant worker table for the unary operation.
 * @param   iReg    Index of the destination general-purpose register.
 */
FNIEMOP_DEF_2(iemOpCommonUnaryGReg, PCIEMOPUNARYSIZES, pImpl, uint8_t, iReg)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint16_t *, pu16Dst, 0);
            IEM_MC_ARG(uint32_t *, pEFlags, 1);
            IEM_MC_REF_GREG_U16(pu16Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
            IEM_MC_ARG(uint32_t *, pEFlags, 1);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
            /* 32-bit writes zero the upper half of the 64-bit register. */
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint64_t *, pu64Dst, 0);
            IEM_MC_ARG(uint32_t *, pEFlags, 1);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
    }
    /* Not reachable -- every IEMMODE value is handled above; this only
       keeps compilers that can't see that happy. */
    return VINF_SUCCESS;
}
7967
7968
7969/** Opcode 0x40 - inc eAX; plain REX prefix in 64-bit mode. */
7970FNIEMOP_DEF(iemOp_inc_eAX)
7971{
7972 /*
7973 * This is a REX prefix in 64-bit mode.
7974 */
7975 if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
7976 {
7977 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex");
7978 pIemCpu->fPrefixes |= IEM_OP_PRF_REX;
7979
7980 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
7981 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
7982 }
7983
7984 IEMOP_MNEMONIC("inc eAX");
7985 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xAX);
7986}
7987
7988
7989/** Opcode 0x41 - inc eCX; REX.B prefix in 64-bit mode. */
7990FNIEMOP_DEF(iemOp_inc_eCX)
7991{
7992 /*
7993 * This is a REX prefix in 64-bit mode.
7994 */
7995 if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
7996 {
7997 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.b");
7998 pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B;
7999 pIemCpu->uRexB = 1 << 3;
8000
8001 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8002 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8003 }
8004
8005 IEMOP_MNEMONIC("inc eCX");
8006 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xCX);
8007}
8008
8009
8010/** Opcode 0x42 - inc eDX; REX.X prefix in 64-bit mode. */
8011FNIEMOP_DEF(iemOp_inc_eDX)
8012{
8013 /*
8014 * This is a REX prefix in 64-bit mode.
8015 */
8016 if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
8017 {
8018 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.x");
8019 pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X;
8020 pIemCpu->uRexIndex = 1 << 3;
8021
8022 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8023 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8024 }
8025
8026 IEMOP_MNEMONIC("inc eDX");
8027 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDX);
8028}
8029
8030
8031
8032/** Opcode 0x43 - inc eBX; REX.BX prefix in 64-bit mode. */
8033FNIEMOP_DEF(iemOp_inc_eBX)
8034{
8035 /*
8036 * This is a REX prefix in 64-bit mode.
8037 */
8038 if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
8039 {
8040 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bx");
8041 pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
8042 pIemCpu->uRexB = 1 << 3;
8043 pIemCpu->uRexIndex = 1 << 3;
8044
8045 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8046 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8047 }
8048
8049 IEMOP_MNEMONIC("inc eBX");
8050 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBX);
8051}
8052
8053
8054/** Opcode 0x44 - inc eSP; REX.R prefix in 64-bit mode. */
8055FNIEMOP_DEF(iemOp_inc_eSP)
8056{
8057 /*
8058 * This is a REX prefix in 64-bit mode.
8059 */
8060 if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
8061 {
8062 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.r");
8063 pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R;
8064 pIemCpu->uRexReg = 1 << 3;
8065
8066 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8067 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8068 }
8069
8070 IEMOP_MNEMONIC("inc eSP");
8071 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSP);
8072}
8073
8074
8075/** Opcode 0x45 - inc eBP; REX.RB prefix in 64-bit mode. */
8076FNIEMOP_DEF(iemOp_inc_eBP)
8077{
8078 /*
8079 * This is a REX prefix in 64-bit mode.
8080 */
8081 if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
8082 {
8083 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rb");
8084 pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B;
8085 pIemCpu->uRexReg = 1 << 3;
8086 pIemCpu->uRexB = 1 << 3;
8087
8088 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8089 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8090 }
8091
8092 IEMOP_MNEMONIC("inc eBP");
8093 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBP);
8094}
8095
8096
8097/** Opcode 0x46 - inc eSI; REX.RX prefix in 64-bit mode. */
8098FNIEMOP_DEF(iemOp_inc_eSI)
8099{
8100 /*
8101 * This is a REX prefix in 64-bit mode.
8102 */
8103 if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
8104 {
8105 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rx");
8106 pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X;
8107 pIemCpu->uRexReg = 1 << 3;
8108 pIemCpu->uRexIndex = 1 << 3;
8109
8110 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8111 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8112 }
8113
8114 IEMOP_MNEMONIC("inc eSI");
8115 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSI);
8116}
8117
8118
8119/** Opcode 0x47 - inc eDI; REX.RBX prefix in 64-bit mode. */
8120FNIEMOP_DEF(iemOp_inc_eDI)
8121{
8122 /*
8123 * This is a REX prefix in 64-bit mode.
8124 */
8125 if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
8126 {
8127 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbx");
8128 pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
8129 pIemCpu->uRexReg = 1 << 3;
8130 pIemCpu->uRexB = 1 << 3;
8131 pIemCpu->uRexIndex = 1 << 3;
8132
8133 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8134 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8135 }
8136
8137 IEMOP_MNEMONIC("inc eDI");
8138 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDI);
8139}
8140
8141
8142/** Opcode 0x48 - dec eAX; REX.W prefix in 64-bit mode (changes effective operand size). */
8143FNIEMOP_DEF(iemOp_dec_eAX)
8144{
8145 /*
8146 * This is a REX prefix in 64-bit mode.
8147 */
8148 if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
8149 {
8150 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.w");
8151 pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_SIZE_REX_W;
8152 iemRecalEffOpSize(pIemCpu);
8153
8154 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8155 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8156 }
8157
8158 IEMOP_MNEMONIC("dec eAX");
8159 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xAX);
8160}
8161
8162
8163/** Opcode 0x49 - dec eCX; REX.BW prefix in 64-bit mode. */
8164FNIEMOP_DEF(iemOp_dec_eCX)
8165{
8166 /*
8167 * This is a REX prefix in 64-bit mode.
8168 */
8169 if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
8170 {
8171 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bw");
8172 pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
8173 pIemCpu->uRexB = 1 << 3;
8174 iemRecalEffOpSize(pIemCpu);
8175
8176 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8177 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8178 }
8179
8180 IEMOP_MNEMONIC("dec eCX");
8181 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xCX);
8182}
8183
8184
8185/** Opcode 0x4a - dec eDX; REX.XW prefix in 64-bit mode. */
8186FNIEMOP_DEF(iemOp_dec_eDX)
8187{
8188 /*
8189 * This is a REX prefix in 64-bit mode.
8190 */
8191 if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
8192 {
8193 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.xw");
8194 pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
8195 pIemCpu->uRexIndex = 1 << 3;
8196 iemRecalEffOpSize(pIemCpu);
8197
8198 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8199 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8200 }
8201
8202 IEMOP_MNEMONIC("dec eDX");
8203 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDX);
8204}
8205
8206
8207/** Opcode 0x4b - dec eBX; REX.BXW prefix in 64-bit mode. */
8208FNIEMOP_DEF(iemOp_dec_eBX)
8209{
8210 /*
8211 * This is a REX prefix in 64-bit mode.
8212 */
8213 if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
8214 {
8215 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bxw");
8216 pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
8217 pIemCpu->uRexB = 1 << 3;
8218 pIemCpu->uRexIndex = 1 << 3;
8219 iemRecalEffOpSize(pIemCpu);
8220
8221 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8222 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8223 }
8224
8225 IEMOP_MNEMONIC("dec eBX");
8226 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBX);
8227}
8228
8229
8230/** Opcode 0x4c - dec eSP; REX.RW prefix in 64-bit mode. */
8231FNIEMOP_DEF(iemOp_dec_eSP)
8232{
8233 /*
8234 * This is a REX prefix in 64-bit mode.
8235 */
8236 if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
8237 {
8238 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rw");
8239 pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_SIZE_REX_W;
8240 pIemCpu->uRexReg = 1 << 3;
8241 iemRecalEffOpSize(pIemCpu);
8242
8243 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8244 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8245 }
8246
8247 IEMOP_MNEMONIC("dec eSP");
8248 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSP);
8249}
8250
8251
8252/** Opcode 0x4d - dec eBP; REX.RBW prefix in 64-bit mode. */
8253FNIEMOP_DEF(iemOp_dec_eBP)
8254{
8255 /*
8256 * This is a REX prefix in 64-bit mode.
8257 */
8258 if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
8259 {
8260 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbw");
8261 pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
8262 pIemCpu->uRexReg = 1 << 3;
8263 pIemCpu->uRexB = 1 << 3;
8264 iemRecalEffOpSize(pIemCpu);
8265
8266 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8267 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8268 }
8269
8270 IEMOP_MNEMONIC("dec eBP");
8271 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBP);
8272}
8273
8274
8275/** Opcode 0x4e - dec eSI; REX.RXW prefix in 64-bit mode. */
8276FNIEMOP_DEF(iemOp_dec_eSI)
8277{
8278 /*
8279 * This is a REX prefix in 64-bit mode.
8280 */
8281 if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
8282 {
8283 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rxw");
8284 pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
8285 pIemCpu->uRexReg = 1 << 3;
8286 pIemCpu->uRexIndex = 1 << 3;
8287 iemRecalEffOpSize(pIemCpu);
8288
8289 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8290 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8291 }
8292
8293 IEMOP_MNEMONIC("dec eSI");
8294 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSI);
8295}
8296
8297
8298/** Opcode 0x4f - dec eDI; REX.RBXW prefix (all extension bits + W) in 64-bit mode. */
8299FNIEMOP_DEF(iemOp_dec_eDI)
8300{
8301 /*
8302 * This is a REX prefix in 64-bit mode.
8303 */
8304 if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
8305 {
8306 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbxw");
8307 pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
8308 pIemCpu->uRexReg = 1 << 3;
8309 pIemCpu->uRexB = 1 << 3;
8310 pIemCpu->uRexIndex = 1 << 3;
8311 iemRecalEffOpSize(pIemCpu);
8312
8313 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8314 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8315 }
8316
8317 IEMOP_MNEMONIC("dec eDI");
8318 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDI);
8319}
8320
8321
8322/**
8323 * Common 'push register' helper.
 *
 * Fetches the given general-purpose register at the current effective operand
 * size and pushes it on the stack.
 *
 * @param   iReg    The general-purpose register to push; in 64-bit mode
 *                  REX.B is OR'ed in below.
8324 */
8325FNIEMOP_DEF_1(iemOpCommonPushGReg, uint8_t, iReg)
8326{
8327 IEMOP_HLP_NO_LOCK_PREFIX();
8328 if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
8329 {
8330 iReg |= pIemCpu->uRexB;
 /* Push defaults to 64-bit operand size in long mode; 66h selects 16-bit. */
8331 pIemCpu->enmDefOpSize = IEMMODE_64BIT;
8332 pIemCpu->enmEffOpSize = !(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
8333 }
8334
8335 switch (pIemCpu->enmEffOpSize)
8336 {
8337 case IEMMODE_16BIT:
8338 IEM_MC_BEGIN(0, 1);
8339 IEM_MC_LOCAL(uint16_t, u16Value);
8340 IEM_MC_FETCH_GREG_U16(u16Value, iReg);
8341 IEM_MC_PUSH_U16(u16Value);
8342 IEM_MC_ADVANCE_RIP();
8343 IEM_MC_END();
8344 break;
8345
8346 case IEMMODE_32BIT:
8347 IEM_MC_BEGIN(0, 1);
8348 IEM_MC_LOCAL(uint32_t, u32Value);
8349 IEM_MC_FETCH_GREG_U32(u32Value, iReg);
8350 IEM_MC_PUSH_U32(u32Value);
8351 IEM_MC_ADVANCE_RIP();
8352 IEM_MC_END();
8353 break;
8354
8355 case IEMMODE_64BIT:
8356 IEM_MC_BEGIN(0, 1);
8357 IEM_MC_LOCAL(uint64_t, u64Value);
8358 IEM_MC_FETCH_GREG_U64(u64Value, iReg);
8359 IEM_MC_PUSH_U64(u64Value);
8360 IEM_MC_ADVANCE_RIP();
8361 IEM_MC_END();
8362 break;
8363 }
8364
8365 return VINF_SUCCESS;
8366}
8367
8368
8369/** Opcode 0x50 - push rAX: dispatch to the common push-register worker. */
8370FNIEMOP_DEF(iemOp_push_eAX)
8371{
8372 IEMOP_MNEMONIC("push rAX");
8373 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xAX);
8374}
8375
8376
8377/** Opcode 0x51 - push rCX: dispatch to the common push-register worker. */
8378FNIEMOP_DEF(iemOp_push_eCX)
8379{
8380 IEMOP_MNEMONIC("push rCX");
8381 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xCX);
8382}
8383
8384
8385/** Opcode 0x52 - push rDX: dispatch to the common push-register worker. */
8386FNIEMOP_DEF(iemOp_push_eDX)
8387{
8388 IEMOP_MNEMONIC("push rDX");
8389 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDX);
8390}
8391
8392
8393/** Opcode 0x53 - push rBX: dispatch to the common push-register worker. */
8394FNIEMOP_DEF(iemOp_push_eBX)
8395{
8396 IEMOP_MNEMONIC("push rBX");
8397 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBX);
8398}
8399
8400
8401/** Opcode 0x54 - push rSP.
 *
 * On the 8086 target, 'push sp' pushes the value of SP *after* the
 * decrement (SP - 2); on 80186 and later the pre-decrement value is pushed
 * (handled by the common worker).
 */
8402FNIEMOP_DEF(iemOp_push_eSP)
8403{
8404 IEMOP_MNEMONIC("push rSP");
8405 if (IEM_GET_TARGET_CPU(pIemCpu) == IEMTARGETCPU_8086)
8406 {
8407 IEM_MC_BEGIN(0, 1);
8408 IEM_MC_LOCAL(uint16_t, u16Value);
8409 IEM_MC_FETCH_GREG_U16(u16Value, X86_GREG_xSP);
8410 IEM_MC_SUB_LOCAL_U16(u16Value, 2);
8411 IEM_MC_PUSH_U16(u16Value);
8412 IEM_MC_ADVANCE_RIP();
8413 IEM_MC_END();
 return VINF_SUCCESS; /* Fix: don't fall through and push SP a second time. */
8414 }
8415 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSP);
8416}
8417
8418
8419/** Opcode 0x55 - push rBP: dispatch to the common push-register worker. */
8420FNIEMOP_DEF(iemOp_push_eBP)
8421{
8422 IEMOP_MNEMONIC("push rBP");
8423 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBP);
8424}
8425
8426
8427/** Opcode 0x56 - push rSI: dispatch to the common push-register worker. */
8428FNIEMOP_DEF(iemOp_push_eSI)
8429{
8430 IEMOP_MNEMONIC("push rSI");
8431 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSI);
8432}
8433
8434
8435/** Opcode 0x57 - push rDI: dispatch to the common push-register worker. */
8436FNIEMOP_DEF(iemOp_push_eDI)
8437{
8438 IEMOP_MNEMONIC("push rDI");
8439 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDI);
8440}
8441
8442
8443/**
8444 * Common 'pop register' helper.
 *
 * Pops a value of the current effective operand size off the stack straight
 * into the given general-purpose register.
 *
 * @param   iReg    The destination general-purpose register; in 64-bit mode
 *                  REX.B is OR'ed in below.
8445 */
8446FNIEMOP_DEF_1(iemOpCommonPopGReg, uint8_t, iReg)
8447{
8448 IEMOP_HLP_NO_LOCK_PREFIX();
8449 if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
8450 {
8451 iReg |= pIemCpu->uRexB;
 /* Pop defaults to 64-bit operand size in long mode; 66h selects 16-bit. */
8452 pIemCpu->enmDefOpSize = IEMMODE_64BIT;
8453 pIemCpu->enmEffOpSize = !(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
8454 }
8455
8456 switch (pIemCpu->enmEffOpSize)
8457 {
8458 case IEMMODE_16BIT:
8459 IEM_MC_BEGIN(0, 1);
8460 IEM_MC_LOCAL(uint16_t *, pu16Dst);
8461 IEM_MC_REF_GREG_U16(pu16Dst, iReg);
8462 IEM_MC_POP_U16(pu16Dst);
8463 IEM_MC_ADVANCE_RIP();
8464 IEM_MC_END();
8465 break;
8466
8467 case IEMMODE_32BIT:
8468 IEM_MC_BEGIN(0, 1);
8469 IEM_MC_LOCAL(uint32_t *, pu32Dst);
8470 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
8471 IEM_MC_POP_U32(pu32Dst);
8472 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /** @todo testcase*/
8473 IEM_MC_ADVANCE_RIP();
8474 IEM_MC_END();
8475 break;
8476
8477 case IEMMODE_64BIT:
8478 IEM_MC_BEGIN(0, 1);
8479 IEM_MC_LOCAL(uint64_t *, pu64Dst);
8480 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
8481 IEM_MC_POP_U64(pu64Dst);
8482 IEM_MC_ADVANCE_RIP();
8483 IEM_MC_END();
8484 break;
8485 }
8486
8487 return VINF_SUCCESS;
8488}
8489
8490
8491/** Opcode 0x58 - pop rAX: dispatch to the common pop-register worker. */
8492FNIEMOP_DEF(iemOp_pop_eAX)
8493{
8494 IEMOP_MNEMONIC("pop rAX");
8495 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xAX);
8496}
8497
8498
8499/** Opcode 0x59 - pop rCX: dispatch to the common pop-register worker. */
8500FNIEMOP_DEF(iemOp_pop_eCX)
8501{
8502 IEMOP_MNEMONIC("pop rCX");
8503 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xCX);
8504}
8505
8506
8507/** Opcode 0x5a - pop rDX: dispatch to the common pop-register worker. */
8508FNIEMOP_DEF(iemOp_pop_eDX)
8509{
8510 IEMOP_MNEMONIC("pop rDX");
8511 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDX);
8512}
8513
8514
8515/** Opcode 0x5b - pop rBX: dispatch to the common pop-register worker. */
8516FNIEMOP_DEF(iemOp_pop_eBX)
8517{
8518 IEMOP_MNEMONIC("pop rBX");
8519 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBX);
8520}
8521
8522
8523/** Opcode 0x5c - pop rSP.
 *
 * Special-cased: popping into SP must store the popped value into the
 * register rather than go through the common worker, which would reference
 * the register being used as stack pointer while it changes.
 */
8524FNIEMOP_DEF(iemOp_pop_eSP)
8525{
8526 IEMOP_MNEMONIC("pop rSP");
8527 if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
8528 {
 /* REX.B selects r12 instead of rSP, so the common worker applies. */
8529 if (pIemCpu->uRexB)
8530 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSP);
8531 pIemCpu->enmDefOpSize = IEMMODE_64BIT;
8532 pIemCpu->enmEffOpSize = !(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
8533 }
8534
8535 IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
8536 DISOPTYPE_HARMLESS | DISOPTYPE_DEFAULT_64_OP_SIZE | DISOPTYPE_REXB_EXTENDS_OPREG);
8537 /** @todo add testcase for this instruction. */
8538 switch (pIemCpu->enmEffOpSize)
8539 {
8540 case IEMMODE_16BIT:
8541 IEM_MC_BEGIN(0, 1);
8542 IEM_MC_LOCAL(uint16_t, u16Dst);
8543 IEM_MC_POP_U16(&u16Dst); /** @todo not correct MC, fix later. */
8544 IEM_MC_STORE_GREG_U16(X86_GREG_xSP, u16Dst);
8545 IEM_MC_ADVANCE_RIP();
8546 IEM_MC_END();
8547 break;
8548
8549 case IEMMODE_32BIT:
8550 IEM_MC_BEGIN(0, 1);
8551 IEM_MC_LOCAL(uint32_t, u32Dst);
8552 IEM_MC_POP_U32(&u32Dst);
8553 IEM_MC_STORE_GREG_U32(X86_GREG_xSP, u32Dst);
8554 IEM_MC_ADVANCE_RIP();
8555 IEM_MC_END();
8556 break;
8557
8558 case IEMMODE_64BIT:
8559 IEM_MC_BEGIN(0, 1);
8560 IEM_MC_LOCAL(uint64_t, u64Dst);
8561 IEM_MC_POP_U64(&u64Dst);
8562 IEM_MC_STORE_GREG_U64(X86_GREG_xSP, u64Dst);
8563 IEM_MC_ADVANCE_RIP();
8564 IEM_MC_END();
8565 break;
8566 }
8567
8568 return VINF_SUCCESS;
8569}
8570
8571
8572/** Opcode 0x5d - pop rBP: dispatch to the common pop-register worker. */
8573FNIEMOP_DEF(iemOp_pop_eBP)
8574{
8575 IEMOP_MNEMONIC("pop rBP");
8576 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBP);
8577}
8578
8579
8580/** Opcode 0x5e - pop rSI: dispatch to the common pop-register worker. */
8581FNIEMOP_DEF(iemOp_pop_eSI)
8582{
8583 IEMOP_MNEMONIC("pop rSI");
8584 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSI);
8585}
8586
8587
8588/** Opcode 0x5f - pop rDI: dispatch to the common pop-register worker. */
8589FNIEMOP_DEF(iemOp_pop_eDI)
8590{
8591 IEMOP_MNEMONIC("pop rDI");
8592 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDI);
8593}
8594
8595
8596/** Opcode 0x60 - pusha/pushad: 186+, invalid in 64-bit mode; deferred to C implementation. */
8597FNIEMOP_DEF(iemOp_pusha)
8598{
8599 IEMOP_MNEMONIC("pusha");
8600 IEMOP_HLP_MIN_186();
8601 IEMOP_HLP_NO_64BIT();
8602 if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
8603 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_16);
 /* Outside 64-bit mode the only remaining operand size is 32-bit. */
8604 Assert(pIemCpu->enmEffOpSize == IEMMODE_32BIT);
8605 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_32);
8606}
8607
8608
8609/** Opcode 0x61 - popa/popad: 186+, invalid in 64-bit mode; deferred to C implementation. */
8610FNIEMOP_DEF(iemOp_popa)
8611{
8612 IEMOP_MNEMONIC("popa");
8613 IEMOP_HLP_MIN_186();
8614 IEMOP_HLP_NO_64BIT();
8615 if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
8616 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_16);
 /* Outside 64-bit mode the only remaining operand size is 32-bit. */
8617 Assert(pIemCpu->enmEffOpSize == IEMMODE_32BIT);
8618 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_32);
8619}
8620
8621
8622/** Opcode 0x62 - bound Gv,Ma (also the EVEX prefix escape). Stubbed: not implemented yet. */
8623FNIEMOP_STUB(iemOp_bound_Gv_Ma_evex);
8624// IEMOP_HLP_MIN_186();
8625
8626
8627/** Opcode 0x63 - non-64-bit modes: arpl Ew,Gw (286+, protected mode only). */
8628FNIEMOP_DEF(iemOp_arpl_Ew_Gw)
8629{
8630 IEMOP_MNEMONIC("arpl Ew,Gw");
8631 IEMOP_HLP_MIN_286();
8632 IEMOP_HLP_NO_REAL_OR_V86_MODE();
8633 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8634
8635 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8636 {
8637 /* Register destination: reference it directly and update EFLAGS in place. */
8638 IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
8639 IEM_MC_BEGIN(3, 0);
8640 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8641 IEM_MC_ARG(uint16_t, u16Src, 1);
8642 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8643
8644 IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
8645 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK));
8646 IEM_MC_REF_EFLAGS(pEFlags);
8647 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);
8648
8649 IEM_MC_ADVANCE_RIP();
8650 IEM_MC_END();
8651 }
8652 else
8653 {
8654 /* Memory destination: map read/write, run the worker, then commit both. */
8655 IEM_MC_BEGIN(3, 2);
8656 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8657 IEM_MC_ARG(uint16_t, u16Src, 1);
8658 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8659 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8660
8661 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8662 IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
8663 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
8664 IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
8665 IEM_MC_FETCH_EFLAGS(EFlags);
8666 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);
8667
8668 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
8669 IEM_MC_COMMIT_EFLAGS(EFlags);
8670 IEM_MC_ADVANCE_RIP();
8671 IEM_MC_END();
8672 }
8673 return VINF_SUCCESS;
8674
8675}
8676
8677
8678/** Opcode 0x63.
8679 * @note This is a weird one. It works like a regular move instruction if
8680 * REX.W isn't set, at least according to AMD docs (rev 3.15, 2009-11).
8681 * @todo This definitely needs a testcase to verify the odd cases. */
8682FNIEMOP_DEF(iemOp_movsxd_Gv_Ev)
8683{
8684 Assert(pIemCpu->enmEffOpSize == IEMMODE_64BIT); /* Caller branched already . */
8685
8686 IEMOP_MNEMONIC("movsxd Gv,Ev");
8687 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8688
8689 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8690 {
8691 /*
8692 * Register to register.
8693 */
8694 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8695 IEM_MC_BEGIN(0, 1);
8696 IEM_MC_LOCAL(uint64_t, u64Value);
 /* Fetch 32-bit source sign-extended to 64 bits, then store to the full register. */
8697 IEM_MC_FETCH_GREG_U32_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
8698 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
8699 IEM_MC_ADVANCE_RIP();
8700 IEM_MC_END();
8701 }
8702 else
8703 {
8704 /*
8705 * We're loading a register from memory.
8706 */
8707 IEM_MC_BEGIN(0, 2);
8708 IEM_MC_LOCAL(uint64_t, u64Value);
8709 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8710 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8711 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8712 IEM_MC_FETCH_MEM_U32_SX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
8713 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
8714 IEM_MC_ADVANCE_RIP();
8715 IEM_MC_END();
8716 }
8717 return VINF_SUCCESS;
8718}
8719
8720
8721/** Opcode 0x64 - FS segment-override prefix (386+): record it and decode the next opcode byte. */
8722FNIEMOP_DEF(iemOp_seg_FS)
8723{
8724 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg fs");
8725 IEMOP_HLP_MIN_386();
8726
8727 pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_FS;
8728 pIemCpu->iEffSeg = X86_SREG_FS;
8729
8730 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8731 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8732}
8733
8734
8735/** Opcode 0x65 - GS segment-override prefix (386+): record it and decode the next opcode byte. */
8736FNIEMOP_DEF(iemOp_seg_GS)
8737{
8738 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg gs");
8739 IEMOP_HLP_MIN_386();
8740
8741 pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_GS;
8742 pIemCpu->iEffSeg = X86_SREG_GS;
8743
8744 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8745 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8746}
8747
8748
8749/** Opcode 0x66 - operand-size override prefix (386+): flag it, recalc effective size, keep decoding. */
8750FNIEMOP_DEF(iemOp_op_size)
8751{
8752 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("op size");
8753 IEMOP_HLP_MIN_386();
8754
8755 pIemCpu->fPrefixes |= IEM_OP_PRF_SIZE_OP;
8756 iemRecalEffOpSize(pIemCpu);
8757
8758 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8759 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8760}
8761
8762
8763/** Opcode 0x67 - address-size override prefix (386+): toggle effective address mode, keep decoding. */
8764FNIEMOP_DEF(iemOp_addr_size)
8765{
8766 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("addr size");
8767 IEMOP_HLP_MIN_386();
8768
8769 pIemCpu->fPrefixes |= IEM_OP_PRF_SIZE_ADDR;
 /* 16 <-> 32 toggle in legacy modes; 64-bit mode drops to 32-bit addressing. */
8770 switch (pIemCpu->enmDefAddrMode)
8771 {
8772 case IEMMODE_16BIT: pIemCpu->enmEffAddrMode = IEMMODE_32BIT; break;
8773 case IEMMODE_32BIT: pIemCpu->enmEffAddrMode = IEMMODE_16BIT; break;
8774 case IEMMODE_64BIT: pIemCpu->enmEffAddrMode = IEMMODE_32BIT; break;
8775 default: AssertFailed();
8776 }
8777
8778 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8779 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8780}
8781
8782
8783/** Opcode 0x68 - push Iz (186+): push an immediate of the effective operand size. */
8784FNIEMOP_DEF(iemOp_push_Iz)
8785{
8786 IEMOP_MNEMONIC("push Iz");
8787 IEMOP_HLP_MIN_186();
8788 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
8789 switch (pIemCpu->enmEffOpSize)
8790 {
8791 case IEMMODE_16BIT:
8792 {
8793 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
8794 IEMOP_HLP_NO_LOCK_PREFIX();
8795 IEM_MC_BEGIN(0,0);
8796 IEM_MC_PUSH_U16(u16Imm);
8797 IEM_MC_ADVANCE_RIP();
8798 IEM_MC_END();
8799 return VINF_SUCCESS;
8800 }
8801
8802 case IEMMODE_32BIT:
8803 {
8804 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
8805 IEMOP_HLP_NO_LOCK_PREFIX();
8806 IEM_MC_BEGIN(0,0);
8807 IEM_MC_PUSH_U32(u32Imm);
8808 IEM_MC_ADVANCE_RIP();
8809 IEM_MC_END();
8810 return VINF_SUCCESS;
8811 }
8812
8813 case IEMMODE_64BIT:
8814 {
 /* 64-bit form takes a 32-bit immediate sign-extended to 64 bits. */
8815 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
8816 IEMOP_HLP_NO_LOCK_PREFIX();
8817 IEM_MC_BEGIN(0,0);
8818 IEM_MC_PUSH_U64(u64Imm);
8819 IEM_MC_ADVANCE_RIP();
8820 IEM_MC_END();
8821 return VINF_SUCCESS;
8822 }
8823
8824 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8825 }
8826}
8827
8828
8829/** Opcode 0x69 - imul Gv,Ev,Iz (186+): three-operand signed multiply with full-size immediate. */
8830FNIEMOP_DEF(iemOp_imul_Gv_Ev_Iz)
8831{
8832 IEMOP_MNEMONIC("imul Gv,Ev,Iz"); /* Gv = Ev * Iz; */
8833 IEMOP_HLP_MIN_186();
8834 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8835 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
8836
8837 switch (pIemCpu->enmEffOpSize)
8838 {
8839 case IEMMODE_16BIT:
8840 {
8841 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8842 {
8843 /* register operand */
8844 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
8845 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8846
8847 IEM_MC_BEGIN(3, 1);
8848 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8849 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm,1);
8850 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8851 IEM_MC_LOCAL(uint16_t, u16Tmp);
8852
 /* Multiply into a local, then write the product to the Gv register. */
8853 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
8854 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
8855 IEM_MC_REF_EFLAGS(pEFlags);
8856 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
8857 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp);
8858
8859 IEM_MC_ADVANCE_RIP();
8860 IEM_MC_END();
8861 }
8862 else
8863 {
8864 /* memory operand */
8865 IEM_MC_BEGIN(3, 2);
8866 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8867 IEM_MC_ARG(uint16_t, u16Src, 1);
8868 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8869 IEM_MC_LOCAL(uint16_t, u16Tmp);
8870 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8871
 /* cbImm = 2: the immediate follows the ModR/M bytes. */
8872 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
8873 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
8874 IEM_MC_ASSIGN(u16Src, u16Imm);
8875 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8876 IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
8877 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
8878 IEM_MC_REF_EFLAGS(pEFlags);
8879 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
8880 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp);
8881
8882 IEM_MC_ADVANCE_RIP();
8883 IEM_MC_END();
8884 }
8885 return VINF_SUCCESS;
8886 }
8887
8888 case IEMMODE_32BIT:
8889 {
8890 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8891 {
8892 /* register operand */
8893 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
8894 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8895
8896 IEM_MC_BEGIN(3, 1);
8897 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8898 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm,1);
8899 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8900 IEM_MC_LOCAL(uint32_t, u32Tmp);
8901
8902 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
8903 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
8904 IEM_MC_REF_EFLAGS(pEFlags);
8905 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
8906 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);
8907
8908 IEM_MC_ADVANCE_RIP();
8909 IEM_MC_END();
8910 }
8911 else
8912 {
8913 /* memory operand */
8914 IEM_MC_BEGIN(3, 2);
8915 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8916 IEM_MC_ARG(uint32_t, u32Src, 1);
8917 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8918 IEM_MC_LOCAL(uint32_t, u32Tmp);
8919 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8920
 /* cbImm = 4: the immediate follows the ModR/M bytes. */
8921 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
8922 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
8923 IEM_MC_ASSIGN(u32Src, u32Imm);
8924 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8925 IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
8926 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
8927 IEM_MC_REF_EFLAGS(pEFlags);
8928 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
8929 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);
8930
8931 IEM_MC_ADVANCE_RIP();
8932 IEM_MC_END();
8933 }
8934 return VINF_SUCCESS;
8935 }
8936
8937 case IEMMODE_64BIT:
8938 {
8939 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8940 {
8941 /* register operand */
 /* 64-bit form takes a 32-bit immediate sign-extended to 64 bits. */
8942 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
8943 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8944
8945 IEM_MC_BEGIN(3, 1);
8946 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8947 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm,1);
8948 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8949 IEM_MC_LOCAL(uint64_t, u64Tmp);
8950
8951 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
8952 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
8953 IEM_MC_REF_EFLAGS(pEFlags);
8954 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
8955 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);
8956
8957 IEM_MC_ADVANCE_RIP();
8958 IEM_MC_END();
8959 }
8960 else
8961 {
8962 /* memory operand */
8963 IEM_MC_BEGIN(3, 2);
8964 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8965 IEM_MC_ARG(uint64_t, u64Src, 1);
8966 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8967 IEM_MC_LOCAL(uint64_t, u64Tmp);
8968 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8969
 /* cbImm = 4: the (sign-extended) 32-bit immediate follows the ModR/M bytes. */
8970 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
8971 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
8972 IEM_MC_ASSIGN(u64Src, u64Imm);
8973 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8974 IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
8975 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
8976 IEM_MC_REF_EFLAGS(pEFlags);
8977 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
8978 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);
8979
8980 IEM_MC_ADVANCE_RIP();
8981 IEM_MC_END();
8982 }
8983 return VINF_SUCCESS;
8984 }
8985 }
8986 AssertFailedReturn(VERR_IEM_IPE_9);
8987}
8988
8989
8990/** Opcode 0x6a - push Ib (186+): push a sign-extended byte immediate at the effective operand size. */
8991FNIEMOP_DEF(iemOp_push_Ib)
8992{
8993 IEMOP_MNEMONIC("push Ib");
8994 IEMOP_HLP_MIN_186();
8995 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
8996 IEMOP_HLP_NO_LOCK_PREFIX();
8997 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
8998
8999 IEM_MC_BEGIN(0,0);
 /* i8Imm is signed, so it sign-extends to each push width. */
9000 switch (pIemCpu->enmEffOpSize)
9001 {
9002 case IEMMODE_16BIT:
9003 IEM_MC_PUSH_U16(i8Imm);
9004 break;
9005 case IEMMODE_32BIT:
9006 IEM_MC_PUSH_U32(i8Imm);
9007 break;
9008 case IEMMODE_64BIT:
9009 IEM_MC_PUSH_U64(i8Imm);
9010 break;
9011 }
9012 IEM_MC_ADVANCE_RIP();
9013 IEM_MC_END();
9014 return VINF_SUCCESS;
9015}
9016
9017
/**
 * Opcode 0x6b - imul Gv,Ev,Ib.
 *
 * Three-operand signed multiply: Gv = Ev * (sign-extended Ib).  The helper
 * defines CF/OF; SF, ZF, AF and PF are declared undefined below, matching
 * real hardware behavior for IMUL.
 */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Ib)
{
    IEMOP_MNEMONIC("imul Gv,Ev,Ib"); /* Gv = Ev * Ib (sign-extended); */
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                /* The 8-bit immediate is sign-extended to the operand size. */
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);

                /* Multiply in a local (source is Ev), then store the result to Gv. */
                IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* The trailing '1' tells the address calc one immediate byte follows
                   the ModRM/displacement bytes; the immediate is fetched after. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_S8_SX_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ (int8_t)u8Imm, 1); /* Ib sign-extended to 32 bits */
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);

                IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = immediate byte still to come */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_S8_SX_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ (int8_t)u8Imm, 1); /* Ib sign-extended to 64 bits */
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);

                IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = immediate byte still to come */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S8_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
    }
    AssertFailedReturn(VERR_IEM_IPE_8); /* all IEMMODE values handled above */
}
9171
9172
/**
 * Opcode 0x6c - ins Yb,DX (byte input from port DX to ES:[e/rDI]).
 *
 * Dispatches to a C implementation (CIMPL) selected by the effective address
 * size; a REP/REPNE prefix selects the repeating variant.  NOTE(review):
 * REPNZ is treated the same as REPZ here - presumably matching hardware,
 * where F2 on INS behaves like F3; confirm against the SDM.
 */
FNIEMOP_DEF(iemOp_insb_Yb_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep ins Yb,DX");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr16, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr32, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* expands to the default: label */
        }
    }
    else
    {
        IEMOP_MNEMONIC("ins Yb,DX");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr16, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr32, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9201
9202
/**
 * Opcode 0x6d - ins Yv,DX (word/dword input from port DX to ES:[e/rDI]).
 *
 * Selects a CIMPL worker by operand size x address size.  A 64-bit operand
 * size falls through to the 32-bit case (I/O port accesses are at most
 * 32 bits wide).  REPNZ is treated like REPZ, as for opcode 0x6c.
 */
FNIEMOP_DEF(iemOp_inswd_Yv_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC("rep ins Yv,DX");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reachable - every inner case returns */
            case IEMMODE_64BIT: /* 64-bit operand size is handled as 32-bit */
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reachable */
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC("ins Yv,DX");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reachable */
            case IEMMODE_64BIT: /* 64-bit operand size is handled as 32-bit */
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reachable */
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9263
9264
/**
 * Opcode 0x6e - outs DX,Yb (byte output from DS:[e/rSI] to port DX).
 *
 * Unlike INS, the source segment is overridable, so the effective segment
 * (pIemCpu->iEffSeg) is passed to the CIMPL worker.  REPNZ is treated the
 * same as REPZ, as for the other string I/O opcodes in this file.
 */
FNIEMOP_DEF(iemOp_outsb_Yb_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep outs DX,Yb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr16, pIemCpu->iEffSeg, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr32, pIemCpu->iEffSeg, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr64, pIemCpu->iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC("outs DX,Yb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr16, pIemCpu->iEffSeg, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr32, pIemCpu->iEffSeg, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr64, pIemCpu->iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9293
9294
/**
 * Opcode 0x6f - outs DX,Yv (word/dword output from DS:[e/rSI] to port DX).
 *
 * Selects a CIMPL worker by operand size x address size; 64-bit operand
 * size falls through to the 32-bit case (I/O ports are at most 32 bits).
 * The effective (possibly overridden) source segment is forwarded.
 */
FNIEMOP_DEF(iemOp_outswd_Yv_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC("rep outs DX,Yv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr16, pIemCpu->iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr32, pIemCpu->iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr64, pIemCpu->iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reachable - every inner case returns */
            case IEMMODE_64BIT: /* 64-bit operand size is handled as 32-bit */
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr16, pIemCpu->iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr32, pIemCpu->iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr64, pIemCpu->iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reachable */
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC("outs DX,Yv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr16, pIemCpu->iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr32, pIemCpu->iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr64, pIemCpu->iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reachable */
            case IEMMODE_64BIT: /* 64-bit operand size is handled as 32-bit */
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr16, pIemCpu->iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr32, pIemCpu->iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr64, pIemCpu->iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reachable */
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9355
9356
/**
 * Opcode 0x70 - jo Jb.
 *
 * Jump short if overflow (OF=1).  LOCK is invalid on Jcc; in 64-bit mode
 * the operand size defaults to 64-bit so the full RIP is updated.
 */
FNIEMOP_DEF(iemOp_jo_Jb)
{
    IEMOP_MNEMONIC("jo Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);  /* taken: RIP += sign-extended displacement */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();      /* not taken: step past the instruction */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9374
9375
/**
 * Opcode 0x71 - jno Jb.
 *
 * Jump short if not overflow (OF=0).  The condition is tested inverted:
 * OF set means fall through, OF clear means take the jump.
 */
FNIEMOP_DEF(iemOp_jno_Jb)
{
    IEMOP_MNEMONIC("jno Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();      /* OF set: not taken */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);  /* OF clear: taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9393
/**
 * Opcode 0x72 - jc/jb/jnae Jb.
 *
 * Jump short if carry (CF=1).
 */
FNIEMOP_DEF(iemOp_jc_Jb)
{
    IEMOP_MNEMONIC("jc/jnae Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9411
9412
/**
 * Opcode 0x73 - jnc/jnb/jae Jb.
 *
 * Jump short if not carry (CF=0).  Condition tested inverted: CF set
 * means fall through.
 */
FNIEMOP_DEF(iemOp_jnc_Jb)
{
    IEMOP_MNEMONIC("jnc/jnb Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_ADVANCE_RIP();      /* CF set: not taken */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);  /* CF clear: taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9430
9431
/**
 * Opcode 0x74 - je/jz Jb.
 *
 * Jump short if equal / zero (ZF=1).
 */
FNIEMOP_DEF(iemOp_je_Jb)
{
    IEMOP_MNEMONIC("je/jz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9449
9450
/**
 * Opcode 0x75 - jne/jnz Jb.
 *
 * Jump short if not equal / not zero (ZF=0).  Condition tested inverted.
 */
FNIEMOP_DEF(iemOp_jne_Jb)
{
    IEMOP_MNEMONIC("jne/jnz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP();      /* ZF set: not taken */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);  /* ZF clear: taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9468
9469
/**
 * Opcode 0x76 - jbe/jna Jb.
 *
 * Jump short if below or equal (CF=1 or ZF=1), i.e. unsigned <=.
 */
FNIEMOP_DEF(iemOp_jbe_Jb)
{
    IEMOP_MNEMONIC("jbe/jna Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9487
9488
/**
 * Opcode 0x77 - jnbe/ja Jb.
 *
 * Jump short if above (CF=0 and ZF=0), i.e. unsigned >.  Condition tested
 * inverted: any of CF/ZF set means fall through.
 */
FNIEMOP_DEF(iemOp_jnbe_Jb)
{
    IEMOP_MNEMONIC("jnbe/ja Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP();      /* CF or ZF set: not taken */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);  /* both clear: taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9506
9507
/**
 * Opcode 0x78 - js Jb.
 *
 * Jump short if sign (SF=1).
 */
FNIEMOP_DEF(iemOp_js_Jb)
{
    IEMOP_MNEMONIC("js Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9525
9526
/**
 * Opcode 0x79 - jns Jb.
 *
 * Jump short if not sign (SF=0).  Condition tested inverted.
 */
FNIEMOP_DEF(iemOp_jns_Jb)
{
    IEMOP_MNEMONIC("jns Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_ADVANCE_RIP();      /* SF set: not taken */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);  /* SF clear: taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9544
9545
/**
 * Opcode 0x7a - jp/jpe Jb.
 *
 * Jump short if parity even (PF=1).
 */
FNIEMOP_DEF(iemOp_jp_Jb)
{
    IEMOP_MNEMONIC("jp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9563
9564
/**
 * Opcode 0x7b - jnp/jpo Jb.
 *
 * Jump short if parity odd (PF=0).  Condition tested inverted.
 */
FNIEMOP_DEF(iemOp_jnp_Jb)
{
    IEMOP_MNEMONIC("jnp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_ADVANCE_RIP();      /* PF set: not taken */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);  /* PF clear: taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9582
9583
/**
 * Opcode 0x7c - jl/jnge Jb.
 *
 * Jump short if less (SF != OF), i.e. signed <.
 */
FNIEMOP_DEF(iemOp_jl_Jb)
{
    IEMOP_MNEMONIC("jl/jnge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);  /* SF != OF: taken */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9601
9602
/**
 * Opcode 0x7d - jnl/jge Jb.
 *
 * Jump short if greater or equal (SF == OF), i.e. signed >=.  Condition
 * tested inverted: SF != OF means fall through.
 */
FNIEMOP_DEF(iemOp_jnl_Jb)
{
    IEMOP_MNEMONIC("jnl/jge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();      /* SF != OF: not taken */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);  /* SF == OF: taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9620
9621
/**
 * Opcode 0x7e - jle/jng Jb.
 *
 * Jump short if less or equal (ZF=1 or SF != OF), i.e. signed <=.
 */
FNIEMOP_DEF(iemOp_jle_Jb)
{
    IEMOP_MNEMONIC("jle/jng Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9639
9640
/**
 * Opcode 0x7f - jnle/jg Jb.
 *
 * Jump short if greater (ZF=0 and SF == OF), i.e. signed >.  Condition
 * tested inverted: ZF set or SF != OF means fall through.
 */
FNIEMOP_DEF(iemOp_jnle_Jb)
{
    IEMOP_MNEMONIC("jnle/jg Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();      /* ZF set or SF != OF: not taken */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);  /* ZF clear and SF == OF: taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9658
9659
/**
 * Opcode 0x80 - Group 1 Eb,Ib.
 *
 * ADD/OR/ADC/SBB/AND/SUB/XOR/CMP on a byte destination with a byte
 * immediate; the sub-operation is selected by the ModRM.reg field, both for
 * the implementation table lookup and for indexing into the packed
 * NUL-separated mnemonic string (reg * 4 bytes per mnemonic slot).
 */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_80)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC2("add\0or\0\0adc\0sbb\0and\0sub\0xor\0cmp" + ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)*4, "Eb,Ib");
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_NO_LOCK_PREFIX(); /* LOCK with a register destination -> #UD */
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        uint32_t fAccess;
        if (pImpl->pfnLockedU8)
            fAccess = IEM_ACCESS_DATA_RW;
        else
        { /* CMP - has no locked variant, so map read-only and reject LOCK. */
            IEMOP_HLP_NO_LOCK_PREFIX();
            fAccess = IEM_ACCESS_DATA_R;
        }
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = immediate byte still to come */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);

        IEM_MC_MEM_MAP(pu8Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
9718
9719
/**
 * Opcode 0x81 - Group 1 Ev,Iz.
 *
 * ADD/OR/ADC/SBB/AND/SUB/XOR/CMP on a word/dword/qword destination with a
 * full-size immediate (16/32 bits; sign-extended 32-bit for 64-bit operand
 * size).  The sub-operation is selected by ModRM.reg; CMP has no locked
 * variant, so its memory form maps read-only and rejects LOCK.
 */
FNIEMOP_DEF(iemOp_Grp1_Ev_Iz)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC2("add\0or\0\0adc\0sbb\0and\0sub\0xor\0cmp" + ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)*4, "Ev,Iz");
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register target */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u16Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                uint32_t fAccess;
                if (pImpl->pfnLockedU16)
                    fAccess = IEM_ACCESS_DATA_RW;
                else
                { /* CMP, TEST */
                    IEMOP_HLP_NO_LOCK_PREFIX();
                    fAccess = IEM_ACCESS_DATA_R;
                }
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); /* 2 = bytes of immediate after ModRM/disp */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_32BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register target */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u32Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /* 32-bit writes zero the upper half in 64-bit mode */

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                uint32_t fAccess;
                if (pImpl->pfnLockedU32)
                    fAccess = IEM_ACCESS_DATA_RW;
                else
                { /* CMP, TEST */
                    IEMOP_HLP_NO_LOCK_PREFIX();
                    fAccess = IEM_ACCESS_DATA_R;
                }
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* 4 immediate bytes follow */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_64BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register target */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); /* Iz is 32-bit, sign-extended */
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u64Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                uint32_t fAccess;
                if (pImpl->pfnLockedU64)
                    fAccess = IEM_ACCESS_DATA_RW;
                else
                { /* CMP */
                    IEMOP_HLP_NO_LOCK_PREFIX();
                    fAccess = IEM_ACCESS_DATA_R;
                }
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* 4 immediate bytes follow */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            break;
        }
    }
    return VINF_SUCCESS; /* all IEMMODE values are covered above */
}
9894
9895
/**
 * Opcode 0x82 - alias of opcode 0x80 (Group 1 Eb,Ib), invalid in 64-bit
 * mode (#UD), hence the IEMOP_HLP_NO_64BIT check before delegating.
 */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_82)
{
    IEMOP_HLP_NO_64BIT(); /** @todo do we need to decode the whole instruction or is this ok? */
    return FNIEMOP_CALL(iemOp_Grp1_Eb_Ib_80);
}
9902
9903
/**
 * Opcode 0x83 - Group 1 Ev,Ib.
 *
 * ADD/OR/ADC/SBB/AND/SUB/XOR/CMP on a word/dword/qword destination with an
 * 8-bit immediate that is sign-extended to the operand size.  The
 * sub-operation is selected by ModRM.reg.
 */
FNIEMOP_DEF(iemOp_Grp1_Ev_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC2("add\0or\0\0adc\0sbb\0and\0sub\0xor\0cmp" + ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)*4, "Ev,Ib");
    /* Note! Seems the OR, AND, and XOR instructions are present on CPUs prior
       to the 386 even if absent in the intel reference manuals and some
       3rd party opcode listings. */
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register target
         */
        IEMOP_HLP_NO_LOCK_PREFIX();
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (int8_t)u8Imm,1); /* sign-extend Ib */
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (int8_t)u8Imm,1); /* sign-extend Ib */
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /* 32-bit writes zero the upper half in 64-bit mode */

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int8_t)u8Imm,1); /* sign-extend Ib */
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }
        }
    }
    else
    {
        /*
         * Memory target.
         */
        /* The U16 locked pointer is used as a proxy for all sizes here;
           NOTE(review): presumably an entry has either all or none of the
           locked variants - confirm against g_apIemImplGrp1. */
        uint32_t fAccess;
        if (pImpl->pfnLockedU16)
            fAccess = IEM_ACCESS_DATA_RW;
        else
        { /* CMP - no locked variant, map read-only and reject LOCK. */
            IEMOP_HLP_NO_LOCK_PREFIX();
            fAccess = IEM_ACCESS_DATA_R;
        }

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = immediate byte still to come */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm); /* sign-extend Ib */
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = immediate byte still to come */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm); /* sign-extend Ib */
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = immediate byte still to come */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm); /* sign-extend Ib */
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }
        }
    }
    return VINF_SUCCESS;
}
10067
10068
/**
 * Opcode 0x84 - test Eb,Gb.
 *
 * Byte AND without storing the result; delegates to the common byte
 * binary-operator worker with the TEST implementation table.  AF is
 * declared undefined.
 */
FNIEMOP_DEF(iemOp_test_Eb_Gb)
{
    IEMOP_MNEMONIC("test Eb,Gb");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo do we have to decode the whole instruction first? */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_test);
}
10077
10078
/**
 * Opcode 0x85 - test Ev,Gv.
 *
 * Word/dword/qword AND without storing the result; delegates to the common
 * operand-size binary-operator worker with the TEST implementation table.
 * AF is declared undefined.
 */
FNIEMOP_DEF(iemOp_test_Ev_Gv)
{
    IEMOP_MNEMONIC("test Ev,Gv");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo do we have to decode the whole instruction first? */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_test);
}
10087
10088
/** Opcode 0x86 - XCHG Eb,Gb (byte exchange between r/m8 and r8). */
FNIEMOP_DEF(iemOp_xchg_Eb_Gb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC("xchg Eb,Gb");

    /*
     * If rm is denoting a register, no more instruction bytes.
     * Register form: read both byte registers and cross-store them.  The
     * LOCK prefix is rejected here (the memory form below does no such
     * check).
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, uTmp1);
        IEM_MC_LOCAL(uint8_t, uTmp2);

        IEM_MC_FETCH_GREG_U8(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_FETCH_GREG_U8(uTmp2, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, uTmp1);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uTmp2);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory: map the byte read/write, take a direct
         * reference to the register and let the assembly worker swap them.
         */
/** @todo the register must be committed separately! */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint8_t *, pu8Mem, 0);
        IEM_MC_ARG(uint8_t *, pu8Reg, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Mem, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8, pu8Mem, pu8Reg);
        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Mem, IEM_ACCESS_DATA_RW);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
10136
10137
/** Opcode 0x87 - XCHG Ev,Gv (16/32/64-bit exchange between r/m and reg). */
FNIEMOP_DEF(iemOp_xchg_Ev_Gv)
{
    IEMOP_MNEMONIC("xchg Ev,Gv");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     * Register form: fetch both registers and cross-store.  LOCK is
     * rejected for the register form only.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX();

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, uTmp1);
                IEM_MC_LOCAL(uint16_t, uTmp2);

                IEM_MC_FETCH_GREG_U16(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U16(uTmp2, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, uTmp1);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, uTmp1);
                IEM_MC_LOCAL(uint32_t, uTmp2);

                IEM_MC_FETCH_GREG_U32(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U32(uTmp2, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, uTmp1);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, uTmp1);
                IEM_MC_LOCAL(uint64_t, uTmp2);

                IEM_MC_FETCH_GREG_U64(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U64(uTmp2, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, uTmp1);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory: map the operand read/write, reference the
         * register and let the assembly worker swap them.
         */
        switch (pIemCpu->enmEffOpSize)
        {
/** @todo the register must be committed separately! */
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint16_t *, pu16Mem, 0);
                IEM_MC_ARG(uint16_t *, pu16Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu16Mem, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16, pu16Mem, pu16Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Mem, IEM_ACCESS_DATA_RW);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint32_t *, pu32Mem, 0);
                IEM_MC_ARG(uint32_t *, pu32Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu32Mem, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32, pu32Mem, pu32Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Mem, IEM_ACCESS_DATA_RW);

                /* Writing via the reference bypasses the normal 32-bit store,
                   so clear bits 63:32 of the register operand explicitly. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint64_t *, pu64Mem, 0);
                IEM_MC_ARG(uint64_t *, pu64Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu64Mem, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64, pu64Mem, pu64Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Mem, IEM_ACCESS_DATA_RW);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
10259
10260
/** Opcode 0x88 - MOV Eb,Gb: store byte register into r/m8. */
FNIEMOP_DEF(iemOp_mov_Eb_Gb)
{
    IEMOP_MNEMONIC("mov Eb,Gb");

    uint8_t bRm;
    IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     * Register-to-register byte move.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_STORE_MEM_U8(pIemCpu->iEffSeg, GCPtrEffDst, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;

}
10299
10300
/** Opcode 0x89 - MOV Ev,Gv: store 16/32/64-bit register into r/m. */
FNIEMOP_DEF(iemOp_mov_Ev_Gv)
{
    IEMOP_MNEMONIC("mov Ev,Gv");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     * Register-to-register move, sized per the effective operand size.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrEffDst, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffDst, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
10387
10388
/** Opcode 0x8a - MOV Gb,Eb: load byte register from r/m8. */
FNIEMOP_DEF(iemOp_mov_Gb_Eb)
{
    IEMOP_MNEMONIC("mov Gb,Eb");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     * Register-to-register byte move (opposite direction of 0x88).
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_MEM_U8(u8Value, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
10425
10426
/** Opcode 0x8b - MOV Gv,Ev: load 16/32/64-bit register from r/m. */
FNIEMOP_DEF(iemOp_mov_Gv_Ev)
{
    IEMOP_MNEMONIC("mov Gv,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     * Register-to-register move (opposite direction of 0x89).
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
10513
10514
/** Opcode 0x63.
 * Mode-dependent dispatcher: outside 64-bit mode this is ARPL Ew,Gw; in
 * 64-bit mode it is MOVSXD when the effective operand size is 64-bit, and
 * behaves like a plain MOV Gv,Ev otherwise. */
FNIEMOP_DEF(iemOp_arpl_Ew_Gw_movsx_Gv_Ev)
{
    if (pIemCpu->enmCpuMode != IEMMODE_64BIT)
        return FNIEMOP_CALL(iemOp_arpl_Ew_Gw);
    if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
        return FNIEMOP_CALL(iemOp_mov_Gv_Ev);
    return FNIEMOP_CALL(iemOp_movsxd_Gv_Ev);
}
10524
10525
/** Opcode 0x8c - MOV Ev,Sw: store a segment selector into r/m. */
FNIEMOP_DEF(iemOp_mov_Ev_Sw)
{
    IEMOP_MNEMONIC("mov Ev,Sw");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * Check that the destination register exists. The REX.R prefix is ignored.
     * Only ES..GS (0..5) are valid segment register encodings; 6 and 7 #UD.
     */
    uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
    if (   iSegReg > X86_SREG_GS)
        return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     * In that case, the operand size is respected and the upper bits are
     * cleared (starting with some pentium).
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_SREG_ZX_U32(u32Value, iSegReg); /* zero-extended selector */
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_SREG_ZX_U64(u64Value, iSegReg); /* zero-extended selector */
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're saving the register to memory. The access is word sized
         * regardless of operand size prefixes.
         */
#if 0 /* not necessary */
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_16BIT;
#endif
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
        IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
10598
10599
10600
10601
/** Opcode 0x8d - LEA Gv,M: store the effective address (no memory access). */
FNIEMOP_DEF(iemOp_lea_Gv_M)
{
    IEMOP_MNEMONIC("lea Gv,M");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE(); /* no register form */

    /* The address is truncated to the effective operand size before storing. */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint16_t, u16Cast);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEM_MC_ASSIGN_TO_SMALLER(u16Cast, GCPtrEffSrc);
            IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Cast);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Cast);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEM_MC_ASSIGN_TO_SMALLER(u32Cast, GCPtrEffSrc);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Cast);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, GCPtrEffSrc);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
    }
    AssertFailedReturn(VERR_IEM_IPE_7);
}
10646
10647
/** Opcode 0x8e - MOV Sw,Ev: load a segment register from r/m16. */
FNIEMOP_DEF(iemOp_mov_Sw_Ev)
{
    IEMOP_MNEMONIC("mov Sw,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * The practical operand size is 16-bit.
     */
#if 0 /* not necessary */
    pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_16BIT;
#endif

    /*
     * Check that the destination register exists and can be used with this
     * instruction. The REX.R prefix is ignored.  Loading CS is invalid,
     * and encodings above GS (6, 7) don't exist.
     */
    uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
    if (   iSegReg == X86_SREG_CS
        || iSegReg > X86_SREG_GS)
        return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     * The actual selector load (descriptor checks etc.) is done by the
     * iemCImpl_load_SReg worker in both branches.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
        IEM_MC_ARG(uint16_t, u16Value, 1);
        IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading the register from memory. The access is word sized
         * regardless of operand size prefixes.
         */
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
        IEM_MC_ARG(uint16_t, u16Value, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_MEM_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
10701
10702
/** Opcode 0x8f /0 - POP Ev.
 *
 * Pops the top of the stack into a r/m operand.  The register form shares
 * the common pop-to-GPR path; the memory form needs the effective address
 * computed with the *incremented* RSP (Intel semantics), hence the
 * decode-twice dance below.
 */
FNIEMOP_DEF_1(iemOp_pop_Ev, uint8_t, bRm)
{
    /* This bugger is rather annoying as it requires rSP to be updated before
       doing the effective address calculations. Will eventually require a
       split between the R/M+SIB decoding and the effective address
       calculation - which is something that is required for any attempt at
       reusing this code for a recompiler. It may also be good to have if we
       need to delay #UD exception caused by invalid lock prefixes.

       For now, we'll do a mostly safe interpreter-only implementation here. */
    /** @todo What's the deal with the 'reg' field and pop Ev? Ignorning it for
     *        now until tests show it's checked.. */
    IEMOP_MNEMONIC("pop Ev");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /* Register access is relatively easy and can share code. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(iemOpCommonPopGReg, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);

    /*
     * Memory target.
     *
     * Intel says that RSP is incremented before it's used in any effective
     * address calcuations. This means some serious extra annoyance here since
     * we decode and calculate the effective address in one step and like to
     * delay committing registers till everything is done.
     *
     * So, we'll decode and calculate the effective address twice. This will
     * require some recoding if turned into a recompiler.
     */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* The common code does this differently. */

#ifndef TST_IEM_CHECK_MC
    /* Calc effective address with modified ESP: decode once (throw-away),
       rewind the opcode cursor, bump RSP by the operand size, decode again
       for the real address, then restore RSP. */
    uint8_t const offOpcodeSaved = pIemCpu->offOpcode;
    RTGCPTR GCPtrEff;
    VBOXSTRICTRC rcStrict;
    rcStrict = iemOpHlpCalcRmEffAddr(pIemCpu, bRm, 0, &GCPtrEff);
    if (rcStrict != VINF_SUCCESS)
        return rcStrict;
    pIemCpu->offOpcode = offOpcodeSaved;

    PCPUMCTX pCtx = pIemCpu->CTX_SUFF(pCtx);
    uint64_t const RspSaved = pCtx->rsp;
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT: iemRegAddToRsp(pIemCpu, pCtx, 2); break;
        case IEMMODE_32BIT: iemRegAddToRsp(pIemCpu, pCtx, 4); break;
        case IEMMODE_64BIT: iemRegAddToRsp(pIemCpu, pCtx, 8); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    rcStrict = iemOpHlpCalcRmEffAddr(pIemCpu, bRm, 0, &GCPtrEff);
    Assert(rcStrict == VINF_SUCCESS); /* first decode succeeded, so must this */
    pCtx->rsp = RspSaved;

    /* Perform the operation - this should be CImpl.  Pop via a temporary RSP
       so RSP is only committed once the store succeeded. */
    RTUINT64U TmpRsp;
    TmpRsp.u = pCtx->rsp;
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Value;
            rcStrict = iemMemStackPopU16Ex(pIemCpu, &u16Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU16(pIemCpu, pIemCpu->iEffSeg, GCPtrEff, u16Value);
            break;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Value;
            rcStrict = iemMemStackPopU32Ex(pIemCpu, &u32Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU32(pIemCpu, pIemCpu->iEffSeg, GCPtrEff, u32Value);
            break;
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Value;
            rcStrict = iemMemStackPopU64Ex(pIemCpu, &u64Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU64(pIemCpu, pIemCpu->iEffSeg, GCPtrEff, u64Value);
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict == VINF_SUCCESS)
    {
        pCtx->rsp = TmpRsp.u;
        iemRegUpdateRipAndClearRF(pIemCpu);
    }
    return rcStrict;

#else
    return VERR_IEM_IPE_2;
#endif
}
10804
10805
/** Opcode 0x8f - group 1A: /0 is POP Ev, /1../7 are the AMD XOP prefix. */
FNIEMOP_DEF(iemOp_Grp1A)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) == (0 << X86_MODRM_REG_SHIFT)) /* /0 */
        return FNIEMOP_CALL_1(iemOp_pop_Ev, bRm);

    /* AMD has defined /1 thru /7 as XOP prefix (similar to three byte VEX). */
    /** @todo XOP decoding. */
    IEMOP_MNEMONIC("3-byte-xop");
    return IEMOP_RAISE_INVALID_OPCODE();
}
10818
10819
10820/**
10821 * Common 'xchg reg,rAX' helper.
10822 */
10823FNIEMOP_DEF_1(iemOpCommonXchgGRegRax, uint8_t, iReg)
10824{
10825 IEMOP_HLP_NO_LOCK_PREFIX();
10826
10827 iReg |= pIemCpu->uRexB;
10828 switch (pIemCpu->enmEffOpSize)
10829 {
10830 case IEMMODE_16BIT:
10831 IEM_MC_BEGIN(0, 2);
10832 IEM_MC_LOCAL(uint16_t, u16Tmp1);
10833 IEM_MC_LOCAL(uint16_t, u16Tmp2);
10834 IEM_MC_FETCH_GREG_U16(u16Tmp1, iReg);
10835 IEM_MC_FETCH_GREG_U16(u16Tmp2, X86_GREG_xAX);
10836 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp1);
10837 IEM_MC_STORE_GREG_U16(iReg, u16Tmp2);
10838 IEM_MC_ADVANCE_RIP();
10839 IEM_MC_END();
10840 return VINF_SUCCESS;
10841
10842 case IEMMODE_32BIT:
10843 IEM_MC_BEGIN(0, 2);
10844 IEM_MC_LOCAL(uint32_t, u32Tmp1);
10845 IEM_MC_LOCAL(uint32_t, u32Tmp2);
10846 IEM_MC_FETCH_GREG_U32(u32Tmp1, iReg);
10847 IEM_MC_FETCH_GREG_U32(u32Tmp2, X86_GREG_xAX);
10848 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp1);
10849 IEM_MC_STORE_GREG_U32(iReg, u32Tmp2);
10850 IEM_MC_ADVANCE_RIP();
10851 IEM_MC_END();
10852 return VINF_SUCCESS;
10853
10854 case IEMMODE_64BIT:
10855 IEM_MC_BEGIN(0, 2);
10856 IEM_MC_LOCAL(uint64_t, u64Tmp1);
10857 IEM_MC_LOCAL(uint64_t, u64Tmp2);
10858 IEM_MC_FETCH_GREG_U64(u64Tmp1, iReg);
10859 IEM_MC_FETCH_GREG_U64(u64Tmp2, X86_GREG_xAX);
10860 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp1);
10861 IEM_MC_STORE_GREG_U64(iReg, u64Tmp2);
10862 IEM_MC_ADVANCE_RIP();
10863 IEM_MC_END();
10864 return VINF_SUCCESS;
10865
10866 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10867 }
10868}
10869
10870
10871/** Opcode 0x90. */
10872FNIEMOP_DEF(iemOp_nop)
10873{
10874 /* R8/R8D and RAX/EAX can be exchanged. */
10875 if (pIemCpu->fPrefixes & IEM_OP_PRF_REX_B)
10876 {
10877 IEMOP_MNEMONIC("xchg r8,rAX");
10878 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xAX);
10879 }
10880
10881 if (pIemCpu->fPrefixes & IEM_OP_PRF_LOCK)
10882 IEMOP_MNEMONIC("pause");
10883 else
10884 IEMOP_MNEMONIC("nop");
10885 IEM_MC_BEGIN(0, 0);
10886 IEM_MC_ADVANCE_RIP();
10887 IEM_MC_END();
10888 return VINF_SUCCESS;
10889}
10890
10891
/** Opcode 0x91 - XCHG rCX,rAX. */
FNIEMOP_DEF(iemOp_xchg_eCX_eAX)
{
    IEMOP_MNEMONIC("xchg rCX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xCX);
}
10898
10899
/** Opcode 0x92 - XCHG rDX,rAX. */
FNIEMOP_DEF(iemOp_xchg_eDX_eAX)
{
    IEMOP_MNEMONIC("xchg rDX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDX);
}
10906
10907
/** Opcode 0x93 - XCHG rBX,rAX. */
FNIEMOP_DEF(iemOp_xchg_eBX_eAX)
{
    IEMOP_MNEMONIC("xchg rBX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBX);
}
10914
10915
10916/** Opcode 0x94. */
10917FNIEMOP_DEF(iemOp_xchg_eSP_eAX)
10918{
10919 IEMOP_MNEMONIC("xchg rSX,rAX");
10920 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSP);
10921}
10922
10923
/** Opcode 0x95 - XCHG rBP,rAX. */
FNIEMOP_DEF(iemOp_xchg_eBP_eAX)
{
    IEMOP_MNEMONIC("xchg rBP,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBP);
}
10930
10931
/** Opcode 0x96 - XCHG rSI,rAX. */
FNIEMOP_DEF(iemOp_xchg_eSI_eAX)
{
    IEMOP_MNEMONIC("xchg rSI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSI);
}
10938
10939
/** Opcode 0x97 - XCHG rDI,rAX. */
FNIEMOP_DEF(iemOp_xchg_eDI_eAX)
{
    IEMOP_MNEMONIC("xchg rDI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDI);
}
10946
10947
/** Opcode 0x98 - CBW / CWDE / CDQE.
 * Sign-extends the lower half of rAX into the full operand-size register by
 * testing the top bit of the source half and OR-ing/AND-ing a mask in. */
FNIEMOP_DEF(iemOp_cbw)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT: /* CBW: AL -> AX */
            IEMOP_MNEMONIC("cbw");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 7) {
                IEM_MC_OR_GREG_U16(X86_GREG_xAX, UINT16_C(0xff00));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U16(X86_GREG_xAX, UINT16_C(0x00ff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT: /* CWDE: AX -> EAX */
            IEMOP_MNEMONIC("cwde");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
                IEM_MC_OR_GREG_U32(X86_GREG_xAX, UINT32_C(0xffff0000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U32(X86_GREG_xAX, UINT32_C(0x0000ffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT: /* CDQE: EAX -> RAX */
            IEMOP_MNEMONIC("cdqe");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
                IEM_MC_OR_GREG_U64(X86_GREG_xAX, UINT64_C(0xffffffff00000000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U64(X86_GREG_xAX, UINT64_C(0x00000000ffffffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10993
10994
/** Opcode 0x99 - CWD / CDQ / CQO.
 * Broadcasts the sign bit of AX/EAX/RAX into DX/EDX/RDX (all-ones or zero). */
FNIEMOP_DEF(iemOp_cwd)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT: /* CWD: sign of AX -> DX */
            IEMOP_MNEMONIC("cwd");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, UINT16_C(0xffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT: /* CDQ: sign of EAX -> EDX */
            IEMOP_MNEMONIC("cdq");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, UINT32_C(0xffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT: /* CQO: sign of RAX -> RDX */
            IEMOP_MNEMONIC("cqo");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 63) {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, UINT64_C(0xffffffffffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11040
11041
/** Opcode 0x9a - CALL Ap (far call with immediate ptr16:16 / ptr16:32).
 * Invalid in 64-bit mode. */
FNIEMOP_DEF(iemOp_call_Ap)
{
    IEMOP_MNEMONIC("call Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    uint32_t offSeg;
    if (pIemCpu->enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&offSeg);
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
    uint16_t uSel;  IEM_OPCODE_GET_NEXT_U16(&uSel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_callf, uSel, offSeg, pIemCpu->enmEffOpSize);
}
11058
11059
/**
 * Opcode 0x9b - WAIT (aka FWAIT).
 *
 * Only checks for pending x87 exceptions / device-not-available
 * conditions; otherwise a no-op that just advances RIP.
 */
FNIEMOP_DEF(iemOp_wait)
{
    IEMOP_MNEMONIC("wait");
    IEMOP_HLP_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
11073
11074
/**
 * Opcode 0x9c - PUSHF/PUSHFD/PUSHFQ.
 *
 * Defaults to 64-bit operand size in long mode; the heavy lifting
 * (privilege checks, flag masking) is done in the C implementation.
 */
FNIEMOP_DEF(iemOp_pushf_Fv)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_pushf, pIemCpu->enmEffOpSize);
}
11082
11083
/**
 * Opcode 0x9d - POPF/POPFD/POPFQ.
 *
 * Defaults to 64-bit operand size in long mode; IOPL/VM86 handling is
 * done in the C implementation.
 */
FNIEMOP_DEF(iemOp_popf_Fv)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_popf, pIemCpu->enmEffOpSize);
}
11091
11092
/**
 * Opcode 0x9e - SAHF.
 *
 * Stores AH into the low byte of EFLAGS (SF, ZF, AF, PF, CF; bit 1 forced
 * to one).  In 64-bit mode the instruction is only valid when the CPU
 * reports the LAHF/SAHF feature (CPUID.80000001h:ECX[0]).
 */
FNIEMOP_DEF(iemOp_sahf)
{
    IEMOP_MNEMONIC("sahf");
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (   pIemCpu->enmCpuMode == IEMMODE_64BIT
        && !IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fLahfSahf)
        return IEMOP_RAISE_INVALID_OPCODE();
    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint32_t, u32Flags);
    IEM_MC_LOCAL(uint32_t, EFlags);
    IEM_MC_FETCH_EFLAGS(EFlags);
    IEM_MC_FETCH_GREG_U8_ZX_U32(u32Flags, X86_GREG_xSP/*=AH*/);
    /* Keep only the status flags from AH, preserve the upper EFLAGS bits,
       and force the reserved bit 1 to one as the architecture requires. */
    IEM_MC_AND_LOCAL_U32(u32Flags, X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    IEM_MC_AND_LOCAL_U32(EFlags, UINT32_C(0xffffff00));
    IEM_MC_OR_LOCAL_U32(u32Flags, X86_EFL_1);
    IEM_MC_OR_2LOCS_U32(EFlags, u32Flags);
    IEM_MC_COMMIT_EFLAGS(EFlags);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
11115
11116
/**
 * Opcode 0x9f - LAHF.
 *
 * Loads the low byte of EFLAGS into AH.  In 64-bit mode the instruction is
 * only valid when the CPU reports the LAHF/SAHF feature
 * (CPUID.80000001h:ECX[0]).
 */
FNIEMOP_DEF(iemOp_lahf)
{
    IEMOP_MNEMONIC("lahf");
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (   pIemCpu->enmCpuMode == IEMMODE_64BIT
        && !IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fLahfSahf)
        return IEMOP_RAISE_INVALID_OPCODE();
    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(uint8_t, u8Flags);
    IEM_MC_FETCH_EFLAGS_U8(u8Flags);
    /* X86_GREG_xSP doubles as the AH encoding when no REX prefix is present. */
    IEM_MC_STORE_GREG_U8(X86_GREG_xSP/*=AH*/, u8Flags);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
11133
11134
11135/**
11136 * Macro used by iemOp_mov_Al_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL and
11137 * iemOp_mov_Ov_rAX to fetch the moffsXX bit of the opcode and fend of lock
11138 * prefixes. Will return on failures.
11139 * @param a_GCPtrMemOff The variable to store the offset in.
11140 */
11141#define IEMOP_FETCH_MOFFS_XX(a_GCPtrMemOff) \
11142 do \
11143 { \
11144 switch (pIemCpu->enmEffAddrMode) \
11145 { \
11146 case IEMMODE_16BIT: \
11147 IEM_OPCODE_GET_NEXT_U16_ZX_U64(&(a_GCPtrMemOff)); \
11148 break; \
11149 case IEMMODE_32BIT: \
11150 IEM_OPCODE_GET_NEXT_U32_ZX_U64(&(a_GCPtrMemOff)); \
11151 break; \
11152 case IEMMODE_64BIT: \
11153 IEM_OPCODE_GET_NEXT_U64(&(a_GCPtrMemOff)); \
11154 break; \
11155 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
11156 } \
11157 IEMOP_HLP_NO_LOCK_PREFIX(); \
11158 } while (0)
11159
11160/** Opcode 0xa0. */
11161FNIEMOP_DEF(iemOp_mov_Al_Ob)
11162{
11163 /*
11164 * Get the offset and fend of lock prefixes.
11165 */
11166 RTGCPTR GCPtrMemOff;
11167 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
11168
11169 /*
11170 * Fetch AL.
11171 */
11172 IEM_MC_BEGIN(0,1);
11173 IEM_MC_LOCAL(uint8_t, u8Tmp);
11174 IEM_MC_FETCH_MEM_U8(u8Tmp, pIemCpu->iEffSeg, GCPtrMemOff);
11175 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
11176 IEM_MC_ADVANCE_RIP();
11177 IEM_MC_END();
11178 return VINF_SUCCESS;
11179}
11180
11181
/**
 * Opcode 0xa1 - MOV rAX,Ov.
 *
 * Loads AX/EAX/RAX (per effective operand size) from the memory location
 * given by the moffs immediate in the effective segment.
 */
FNIEMOP_DEF(iemOp_mov_rAX_Ov)
{
    /*
     * Get the offset and fend of lock prefixes.
     */
    IEMOP_MNEMONIC("mov rAX,Ov");
    RTGCPTR  GCPtrMemOff;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);

    /*
     * Fetch rAX.
     */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint16_t, u16Tmp);
            IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11227
11228
11229/** Opcode 0xa2. */
11230FNIEMOP_DEF(iemOp_mov_Ob_AL)
11231{
11232 /*
11233 * Get the offset and fend of lock prefixes.
11234 */
11235 RTGCPTR GCPtrMemOff;
11236 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
11237
11238 /*
11239 * Store AL.
11240 */
11241 IEM_MC_BEGIN(0,1);
11242 IEM_MC_LOCAL(uint8_t, u8Tmp);
11243 IEM_MC_FETCH_GREG_U8(u8Tmp, X86_GREG_xAX);
11244 IEM_MC_STORE_MEM_U8(pIemCpu->iEffSeg, GCPtrMemOff, u8Tmp);
11245 IEM_MC_ADVANCE_RIP();
11246 IEM_MC_END();
11247 return VINF_SUCCESS;
11248}
11249
11250
11251/** Opcode 0xa3. */
11252FNIEMOP_DEF(iemOp_mov_Ov_rAX)
11253{
11254 /*
11255 * Get the offset and fend of lock prefixes.
11256 */
11257 RTGCPTR GCPtrMemOff;
11258 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
11259
11260 /*
11261 * Store rAX.
11262 */
11263 switch (pIemCpu->enmEffOpSize)
11264 {
11265 case IEMMODE_16BIT:
11266 IEM_MC_BEGIN(0,1);
11267 IEM_MC_LOCAL(uint16_t, u16Tmp);
11268 IEM_MC_FETCH_GREG_U16(u16Tmp, X86_GREG_xAX);
11269 IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrMemOff, u16Tmp);
11270 IEM_MC_ADVANCE_RIP();
11271 IEM_MC_END();
11272 return VINF_SUCCESS;
11273
11274 case IEMMODE_32BIT:
11275 IEM_MC_BEGIN(0,1);
11276 IEM_MC_LOCAL(uint32_t, u32Tmp);
11277 IEM_MC_FETCH_GREG_U32(u32Tmp, X86_GREG_xAX);
11278 IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrMemOff, u32Tmp);
11279 IEM_MC_ADVANCE_RIP();
11280 IEM_MC_END();
11281 return VINF_SUCCESS;
11282
11283 case IEMMODE_64BIT:
11284 IEM_MC_BEGIN(0,1);
11285 IEM_MC_LOCAL(uint64_t, u64Tmp);
11286 IEM_MC_FETCH_GREG_U64(u64Tmp, X86_GREG_xAX);
11287 IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrMemOff, u64Tmp);
11288 IEM_MC_ADVANCE_RIP();
11289 IEM_MC_END();
11290 return VINF_SUCCESS;
11291
11292 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11293 }
11294}
11295
/**
 * Macro used by iemOp_movsb_Xb_Yb and iemOp_movswd_Xv_Yv.
 *
 * Emits one non-REP MOVS case: load ValBits from [seg:xSI], store to
 * [ES:xDI], then advance (or retreat, when EFLAGS.DF is set) both index
 * registers by the operand size in bytes.  Addresses are zero-extended
 * from AddrBits to 64 bits.
 */
#define IEM_MOVS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR,           uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, pIemCpu->iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END();
11314
/**
 * Opcode 0xa4 - MOVSB.
 *
 * Byte string move from [seg:xSI] to [ES:xDI].  REP/REPNE prefixed forms
 * (both map to plain REP for MOVS) are deferred to the C implementation;
 * the single-iteration form is emitted via IEM_MOVS_CASE.
 */
FNIEMOP_DEF(iemOp_movsb_Xb_Yb)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep movsb Xb,Yb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr16, pIemCpu->iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr32, pIemCpu->iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr64, pIemCpu->iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("movsb Xb,Yb");

    /*
     * Sharing case implementation with movs[wdq] below.
     */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_MOVS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_MOVS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_MOVS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
11348
11349
/**
 * Opcode 0xa5 - MOVSW/MOVSD/MOVSQ.
 *
 * Word/dword/qword string move from [seg:xSI] to [ES:xDI].  REP-prefixed
 * forms defer to the C implementation selected by operand and address
 * size; the single-iteration form is emitted via IEM_MOVS_CASE.
 * Note: the op64+addr16 combination cannot be encoded, hence the
 * assertion.  The inner switches in the REP path always return, so the
 * apparent fallthrough after the 32-bit case is unreachable.
 */
FNIEMOP_DEF(iemOp_movswd_Xv_Yv)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep movs Xv,Yv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("movs Xv,Yv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with movsb.
     */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_MOVS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_MOVS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
11432
11433#undef IEM_MOVS_CASE
11434
/**
 * Macro used by iemOp_cmpsb_Xb_Yb and iemOp_cmpswd_Xv_Yv.
 *
 * Emits one non-REP CMPS case: compare ValBits at [seg:xSI] against
 * [ES:xDI] via the assembly cmp worker (which updates EFLAGS), then
 * advance (or retreat, when EFLAGS.DF is set) both index registers by the
 * operand size in bytes.
 */
#define IEM_CMPS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(3, 3); \
        IEM_MC_ARG(uint##ValBits##_t *, puValue1, 0); \
        IEM_MC_ARG(uint##ValBits##_t,   uValue2,  1); \
        IEM_MC_ARG(uint32_t *,          pEFlags,  2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue1); \
        IEM_MC_LOCAL(RTGCPTR,           uAddr); \
        \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue1, pIemCpu->iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue2, X86_SREG_ES, uAddr); \
        IEM_MC_REF_LOCAL(puValue1, uValue1); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puValue1, uValue2, pEFlags); \
        \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END(); \
11461
11462/** Opcode 0xa6. */
11463FNIEMOP_DEF(iemOp_cmpsb_Xb_Yb)
11464{
11465 IEMOP_HLP_NO_LOCK_PREFIX();
11466
11467 /*
11468 * Use the C implementation if a repeat prefix is encountered.
11469 */
11470 if (pIemCpu->fPrefixes & IEM_OP_PRF_REPZ)
11471 {
11472 IEMOP_MNEMONIC("repe cmps Xb,Yb");
11473 switch (pIemCpu->enmEffAddrMode)
11474 {
11475 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr16, pIemCpu->iEffSeg);
11476 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr32, pIemCpu->iEffSeg);
11477 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr64, pIemCpu->iEffSeg);
11478 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11479 }
11480 }
11481 if (pIemCpu->fPrefixes & IEM_OP_PRF_REPNZ)
11482 {
11483 IEMOP_MNEMONIC("repe cmps Xb,Yb");
11484 switch (pIemCpu->enmEffAddrMode)
11485 {
11486 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr16, pIemCpu->iEffSeg);
11487 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr32, pIemCpu->iEffSeg);
11488 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr64, pIemCpu->iEffSeg);
11489 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11490 }
11491 }
11492 IEMOP_MNEMONIC("cmps Xb,Yb");
11493
11494 /*
11495 * Sharing case implementation with cmps[wdq] below.
11496 */
11497 switch (pIemCpu->enmEffAddrMode)
11498 {
11499 case IEMMODE_16BIT: IEM_CMPS_CASE(8, 16); break;
11500 case IEMMODE_32BIT: IEM_CMPS_CASE(8, 32); break;
11501 case IEMMODE_64BIT: IEM_CMPS_CASE(8, 64); break;
11502 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11503 }
11504 return VINF_SUCCESS;
11505
11506}
11507
11508
/**
 * Opcode 0xa7 - CMPSW/CMPSD/CMPSQ.
 *
 * Word/dword/qword string compare of [seg:xSI] against [ES:xDI].  REPE and
 * REPNE prefixed forms defer to C implementations selected by operand and
 * address size; the single-iteration form is emitted via IEM_CMPS_CASE.
 * The op64+addr16 combination cannot be encoded (assertions).  The inner
 * switches in both REP paths always return, so the apparent fallthroughs
 * after the 32-bit cases are unreachable.
 */
FNIEMOP_DEF(iemOp_cmpswd_Xv_Yv)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC("repe cmps Xv,Yv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_4);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    if (pIemCpu->fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC("repne cmps Xv,Yv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_2);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    IEMOP_MNEMONIC("cmps Xv,Yv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with cmpsb.
     */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_CMPS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_CMPS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;

}
11627
11628#undef IEM_CMPS_CASE
11629
/**
 * Opcode 0xa8 - TEST AL,Ib.
 *
 * Delegates to the common AL,Ib binary-operator decoder with the test
 * worker.  AF is architecturally undefined after TEST.
 */
FNIEMOP_DEF(iemOp_test_AL_Ib)
{
    IEMOP_MNEMONIC("test al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_test);
}
11637
11638
/**
 * Opcode 0xa9 - TEST rAX,Iz.
 *
 * Delegates to the common rAX,Iz binary-operator decoder with the test
 * worker.  AF is architecturally undefined after TEST.
 */
FNIEMOP_DEF(iemOp_test_eAX_Iz)
{
    IEMOP_MNEMONIC("test rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_test);
}
11646
11647
/**
 * Macro used by iemOp_stosb_Yb_AL and iemOp_stoswd_Yv_eAX.
 *
 * Emits one non-REP STOS case: store the low ValBits of rAX to [ES:xDI],
 * then advance (or retreat, when EFLAGS.DF is set) xDI by the operand
 * size in bytes.
 */
#define IEM_STOS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        IEM_MC_FETCH_GREG_U##ValBits(uValue, X86_GREG_xAX); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr,  X86_GREG_xDI); \
        IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END(); \
11663
/**
 * Opcode 0xaa - STOSB.
 *
 * Stores AL to [ES:xDI].  REP/REPNE prefixed forms defer to the C
 * implementation; the single-iteration form is emitted via IEM_STOS_CASE.
 */
FNIEMOP_DEF(iemOp_stosb_Yb_AL)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep stos Yb,al");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m16);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m32);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("stos Yb,al");

    /*
     * Sharing case implementation with stos[wdq] below.
     */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_STOS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_STOS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_STOS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
11697
11698
/**
 * Opcode 0xab - STOSW/STOSD/STOSQ.
 *
 * Stores AX/EAX/RAX to [ES:xDI].  REP prefixed forms defer to C
 * implementations selected by operand and address size; the
 * single-iteration form is emitted via IEM_STOS_CASE.  The op64+addr16
 * combination cannot be encoded (assertion).  The inner switches in the
 * REP path always return, so the apparent fallthrough after the 32-bit
 * case is unreachable.
 */
FNIEMOP_DEF(iemOp_stoswd_Yv_eAX)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep stos Yv,rAX");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_9);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("stos Yv,rAX");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with stosb.
     */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_STOS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_STOS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_STOS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_STOS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_STOS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_STOS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_STOS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_STOS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
11781
11782#undef IEM_STOS_CASE
11783
/**
 * Macro used by iemOp_lodsb_AL_Xb and iemOp_lodswd_eAX_Xv.
 *
 * Emits one non-REP LODS case: load ValBits from [seg:xSI] into rAX, then
 * advance (or retreat, when EFLAGS.DF is set) xSI by the operand size in
 * bytes.
 */
#define IEM_LODS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, pIemCpu->iEffSeg, uAddr); \
        IEM_MC_STORE_GREG_U##ValBits(X86_GREG_xAX, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END();
11799
/**
 * Opcode 0xac - LODSB.
 *
 * Loads AL from [seg:xSI].  REP prefixed forms defer to the C
 * implementation; the single-iteration form is emitted via IEM_LODS_CASE.
 */
FNIEMOP_DEF(iemOp_lodsb_AL_Xb)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep lodsb al,Xb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m16, pIemCpu->iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m32, pIemCpu->iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m64, pIemCpu->iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("lodsb al,Xb");

    /*
     * Sharing case implementation with lods[wdq] below.
     */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_LODS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_LODS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_LODS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
11833
11834
/**
 * Opcode 0xad - LODSW/LODSD/LODSQ.
 *
 * Loads AX/EAX/RAX from [seg:xSI].  REP prefixed forms defer to C
 * implementations selected by operand and address size; the
 * single-iteration form is emitted via IEM_LODS_CASE.  The op64+addr16
 * combination cannot be encoded (assertion).  The inner switches in the
 * REP path always return, so the apparent fallthrough after the 32-bit
 * case is unreachable.
 */
FNIEMOP_DEF(iemOp_lodswd_eAX_Xv)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep lods rAX,Xv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_7);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("lods rAX,Xv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with lodsb.
     */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_LODS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_LODS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_LODS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_LODS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_LODS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
11917
11918#undef IEM_LODS_CASE
11919
/**
 * Macro used by iemOp_scasb_AL_Xb and iemOp_scaswd_eAX_Xv.
 *
 * Emits one non-REP SCAS case: compare the low ValBits of rAX against
 * [ES:xDI] via the assembly cmp worker (which updates EFLAGS), then
 * advance (or retreat, when EFLAGS.DF is set) xDI by the operand size in
 * bytes.
 */
#define IEM_SCAS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(3, 2); \
        IEM_MC_ARG(uint##ValBits##_t *, puRax,   0); \
        IEM_MC_ARG(uint##ValBits##_t,   uValue,  1); \
        IEM_MC_ARG(uint32_t *,          pEFlags, 2); \
        IEM_MC_LOCAL(RTGCPTR,           uAddr); \
        \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, X86_SREG_ES, uAddr); \
        IEM_MC_REF_GREG_U##ValBits(puRax, X86_GREG_xAX); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puRax, uValue, pEFlags); \
        \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END();
11941
/**
 * Opcode 0xae - SCASB.
 *
 * Compares AL against [ES:xDI].  REPE and REPNE prefixed forms defer to
 * the respective C implementations; the single-iteration form is emitted
 * via IEM_SCAS_CASE.
 */
FNIEMOP_DEF(iemOp_scasb_AL_Xb)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC("repe scasb al,Xb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m16);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m32);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC("repne scasb al,Xb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m16);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m32);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("scasb al,Xb");

    /*
     * Sharing case implementation with scas[wdq] below.
     */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_SCAS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_SCAS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_SCAS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
11986
11987
/** Opcode 0xaf - scas[wdq]: compare AX/EAX/RAX (per operand size) against the
 *  value at [rDI] and step rDI per EFLAGS.DF (see IEM_SCAS_CASE above). */
FNIEMOP_DEF(iemOp_scaswd_eAX_Xv)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC("repe scas rAX,Xv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no 'break' here: the inner switch returns on every path (default asserts and returns). */
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /** @todo Is this wrong?  We can do 16-bit addressing in 64-bit mode, but not 32-bit, right? */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC("repne scas rAX,Xv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no 'break' here: the inner switch returns on every path (default asserts and returns). */
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_5); /* NOTE(review): repe path above uses VERR_IEM_IPE_6 for the same condition. */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("scas rAX,Xv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with scasb.
     */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_SCAS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_SCAS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_SCAS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_SCAS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_SCAS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_SCAS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_SCAS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_SCAS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
12103
12104#undef IEM_SCAS_CASE
12105
/**
 * Common 'mov r8, imm8' helper (opcodes 0xb0-0xb7).
 *
 * @param   iReg    Destination register index, already OR'ed with the caller's
 *                  REX.B extension.  NOTE(review): indices 4-7 without REX are
 *                  expected to select AH/CH/DH/BH -- presumably resolved inside
 *                  IEM_MC_STORE_GREG_U8; confirm there.
 */
FNIEMOP_DEF_1(iemOpCommonMov_r8_Ib, uint8_t, iReg)
{
    /* Fetch the immediate before raising any lock-prefix fault. */
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL_CONST(uint8_t, u8Value,/*=*/ u8Imm);
    IEM_MC_STORE_GREG_U8(iReg, u8Value);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
12122
12123
/** Opcode 0xb0 - mov AL,Ib (REX.B extends the register index via uRexB). */
FNIEMOP_DEF(iemOp_mov_AL_Ib)
{
    IEMOP_MNEMONIC("mov AL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xAX | pIemCpu->uRexB);
}
12130
12131
/** Opcode 0xb1 - mov CL,Ib.
 *  NOTE(review): function name lacks the 'mov_' prefix used by 0xb0/0xb4;
 *  kept as-is since the opcode table references it by this name. */
FNIEMOP_DEF(iemOp_CL_Ib)
{
    IEMOP_MNEMONIC("mov CL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xCX | pIemCpu->uRexB);
}
12138
12139
/** Opcode 0xb2 - mov DL,Ib. */
FNIEMOP_DEF(iemOp_DL_Ib)
{
    IEMOP_MNEMONIC("mov DL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDX | pIemCpu->uRexB);
}
12146
12147
/** Opcode 0xb3 - mov BL,Ib. */
FNIEMOP_DEF(iemOp_BL_Ib)
{
    IEMOP_MNEMONIC("mov BL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBX | pIemCpu->uRexB);
}
12154
12155
/** Opcode 0xb4 - mov AH,Ib.
 *  Index 4 (X86_GREG_xSP) is intentional: without REX it denotes AH, with REX
 *  it denotes SPL -- NOTE(review): presumably resolved in the U8 GREG accessors. */
FNIEMOP_DEF(iemOp_mov_AH_Ib)
{
    IEMOP_MNEMONIC("mov AH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP | pIemCpu->uRexB);
}
12162
12163
/** Opcode 0xb5 - mov CH,Ib (index 5/xBP: CH without REX, BPL with REX). */
FNIEMOP_DEF(iemOp_CH_Ib)
{
    IEMOP_MNEMONIC("mov CH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBP | pIemCpu->uRexB);
}
12170
12171
/** Opcode 0xb6 - mov DH,Ib (index 6/xSI: DH without REX, SIL with REX). */
FNIEMOP_DEF(iemOp_DH_Ib)
{
    IEMOP_MNEMONIC("mov DH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSI | pIemCpu->uRexB);
}
12178
12179
/** Opcode 0xb7 - mov BH,Ib (index 7/xDI: BH without REX, DIL with REX). */
FNIEMOP_DEF(iemOp_BH_Ib)
{
    IEMOP_MNEMONIC("mov BH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDI | pIemCpu->uRexB);
}
12186
12187
12188/**
12189 * Common 'mov regX,immX' helper.
12190 */
12191FNIEMOP_DEF_1(iemOpCommonMov_Rv_Iv, uint8_t, iReg)
12192{
12193 switch (pIemCpu->enmEffOpSize)
12194 {
12195 case IEMMODE_16BIT:
12196 {
12197 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
12198 IEMOP_HLP_NO_LOCK_PREFIX();
12199
12200 IEM_MC_BEGIN(0, 1);
12201 IEM_MC_LOCAL_CONST(uint16_t, u16Value,/*=*/ u16Imm);
12202 IEM_MC_STORE_GREG_U16(iReg, u16Value);
12203 IEM_MC_ADVANCE_RIP();
12204 IEM_MC_END();
12205 break;
12206 }
12207
12208 case IEMMODE_32BIT:
12209 {
12210 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
12211 IEMOP_HLP_NO_LOCK_PREFIX();
12212
12213 IEM_MC_BEGIN(0, 1);
12214 IEM_MC_LOCAL_CONST(uint32_t, u32Value,/*=*/ u32Imm);
12215 IEM_MC_STORE_GREG_U32(iReg, u32Value);
12216 IEM_MC_ADVANCE_RIP();
12217 IEM_MC_END();
12218 break;
12219 }
12220 case IEMMODE_64BIT:
12221 {
12222 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm); /* 64-bit immediate! */
12223 IEMOP_HLP_NO_LOCK_PREFIX();
12224
12225 IEM_MC_BEGIN(0, 1);
12226 IEM_MC_LOCAL_CONST(uint64_t, u64Value,/*=*/ u64Imm);
12227 IEM_MC_STORE_GREG_U64(iReg, u64Value);
12228 IEM_MC_ADVANCE_RIP();
12229 IEM_MC_END();
12230 break;
12231 }
12232 }
12233
12234 return VINF_SUCCESS;
12235}
12236
12237
/** Opcode 0xb8 - mov rAX,Iv.
 *  NOTE(review): mnemonic string says "IV"; the file's convention elsewhere
 *  is "Iv" (cf. "mov Ev,Iz") -- runtime string left untouched here. */
FNIEMOP_DEF(iemOp_eAX_Iv)
{
    IEMOP_MNEMONIC("mov rAX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xAX | pIemCpu->uRexB);
}
12244
12245
/** Opcode 0xb9 - mov rCX,Iv. */
FNIEMOP_DEF(iemOp_eCX_Iv)
{
    IEMOP_MNEMONIC("mov rCX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xCX | pIemCpu->uRexB);
}
12252
12253
/** Opcode 0xba - mov rDX,Iv. */
FNIEMOP_DEF(iemOp_eDX_Iv)
{
    IEMOP_MNEMONIC("mov rDX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDX | pIemCpu->uRexB);
}
12260
12261
/** Opcode 0xbb - mov rBX,Iv. */
FNIEMOP_DEF(iemOp_eBX_Iv)
{
    IEMOP_MNEMONIC("mov rBX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBX | pIemCpu->uRexB);
}
12268
12269
/** Opcode 0xbc - mov rSP,Iv. */
FNIEMOP_DEF(iemOp_eSP_Iv)
{
    IEMOP_MNEMONIC("mov rSP,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSP | pIemCpu->uRexB);
}
12276
12277
/** Opcode 0xbd - mov rBP,Iv. */
FNIEMOP_DEF(iemOp_eBP_Iv)
{
    IEMOP_MNEMONIC("mov rBP,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBP | pIemCpu->uRexB);
}
12284
12285
/** Opcode 0xbe - mov rSI,Iv. */
FNIEMOP_DEF(iemOp_eSI_Iv)
{
    IEMOP_MNEMONIC("mov rSI,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSI | pIemCpu->uRexB);
}
12292
12293
/** Opcode 0xbf - mov rDI,Iv. */
FNIEMOP_DEF(iemOp_eDI_Iv)
{
    IEMOP_MNEMONIC("mov rDI,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDI | pIemCpu->uRexB);
}
12300
12301
/** Opcode 0xc0 - Group 2 Eb,Ib: rol/ror/rcl/rcr/shl/shr/sar on a byte with an
 *  immediate shift count; the ModR/M reg field selects the operation. */
FNIEMOP_DEF(iemOp_Grp2_Eb_Ib)
{
    IEMOP_HLP_MIN_186();    /* shift-by-immediate first appeared on the 80186 */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Eb,Ib"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Eb,Ib"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Eb,Ib"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Eb,Ib"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Eb,Ib"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Eb,Ib"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Eb,Ib"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();   /* /6 is undefined in group 2 */
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEMOP_HLP_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,   pu8Dst,    0);
        IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1);
        IEM_MC_ARG(uint32_t *,  pEFlags,   2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,   pu8Dst,          0);
        IEM_MC_ARG(uint8_t,     cShiftArg,       1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        /* the trailing '1' tells the addressing code one immediate byte follows */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEM_MC_ASSIGN(cShiftArg, cShift);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
12361
12362
/** Opcode 0xc1 - Group 2 Ev,Ib: rol/ror/rcl/rcr/shl/shr/sar on a
 *  word/dword/qword with an immediate shift count (ModR/M reg selects op). */
FNIEMOP_DEF(iemOp_Grp2_Ev_Ib)
{
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Ev,Ib"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Ev,Ib"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Ev,Ib"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Ev,Ib"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Ev,Ib"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Ev,Ib"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Ev,Ib"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();   /* /6 is undefined in group 2 */
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEMOP_HLP_NO_LOCK_PREFIX();
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,  pu16Dst,   0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *,  pEFlags,   2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,  pu32Dst,   0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *,  pEFlags,   2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit GPR writes zero the upper half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,  pu64Dst,   0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *,  pEFlags,   2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,  pu16Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* the trailing '1' accounts for the immediate byte still to be fetched */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,  pu32Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,  pu64Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12500
12501
/** Opcode 0xc2 - retn Iw: near return, popping Iw extra bytes off the stack. */
FNIEMOP_DEF(iemOp_retn_Iw)
{
    IEMOP_MNEMONIC("retn Iw");
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* near return defaults to 64-bit operand size in long mode */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pIemCpu->enmEffOpSize, u16Imm);
}
12511
12512
/** Opcode 0xc3 - retn: near return (same CIMPL as 0xc2 with a zero pop count). */
FNIEMOP_DEF(iemOp_retn)
{
    IEMOP_MNEMONIC("retn");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pIemCpu->enmEffOpSize, 0);
}
12521
12522
/** Opcode 0xc4 - les Gv,Mp in legacy/compat mode; 2-byte VEX prefix otherwise. */
FNIEMOP_DEF(iemOp_les_Gv_Mp_vex2)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (   pIemCpu->enmCpuMode == IEMMODE_64BIT
        || (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC("2-byte-vex");
        /* The LES instruction is invalid in 64-bit mode.  In legacy and
           compatibility mode it is invalid with MOD=3.
           The use as a VEX prefix is made possible by assigning the inverted
           REX.R to the top MOD bit, and the top bit in the inverted register
           specifier to the bottom MOD bit, thereby effectively limiting 32-bit
           to accessing registers 0..7 in this VEX form. */
        /** @todo VEX: Just use new tables for it. */
        return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_MNEMONIC("les Gv,Mp");
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_ES, bRm);
}
12543
12544
/** Opcode 0xc5 - lds Gv,Mp in legacy/compat mode; 3-byte VEX prefix otherwise. */
FNIEMOP_DEF(iemOp_lds_Gv_Mp_vex3)
{
    /* The LDS instruction is invalid in 64-bit mode.  In legacy and
       compatibility mode it is invalid with MOD=3.
       The use as a VEX prefix is made possible by assigning the inverted
       REX.R and REX.X to the two MOD bits, since the REX bits are ignored
       outside of 64-bit mode.  VEX is not available in real or v86 mode. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pIemCpu->enmCpuMode != IEMMODE_64BIT)
    {
        if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        {
            IEMOP_MNEMONIC("lds Gv,Mp");
            return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_DS, bRm);
        }
        IEMOP_HLP_NO_REAL_OR_V86_MODE();
    }

    /* VEX decoding is not implemented yet: consume the prefix bytes and the
       opcode, then raise #UD below. */
    IEMOP_MNEMONIC("3-byte-vex");
    /** @todo Test when exactly the VEX conformance checks kick in during
     *        instruction decoding and fetching (using \#PF). */
    uint8_t bVex1;   IEM_OPCODE_GET_NEXT_U8(&bVex1);
    uint8_t bVex2;   IEM_OPCODE_GET_NEXT_U8(&bVex2);
    uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
#if 0 /* will make sense of this next week... */
    if (   !(pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REX))
        &&
        )
    {

    }
#endif

    /** @todo VEX: Just use new tables for it. */
    return IEMOP_RAISE_INVALID_OPCODE();
}
12582
12583
/** Opcode 0xc6 - Group 11: mov Eb,Ib (only the /0 encoding is defined). */
FNIEMOP_DEF(iemOp_Grp11_Eb_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
        return IEMOP_RAISE_INVALID_OPCODE();
    IEMOP_MNEMONIC("mov Eb,Ib");

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_BEGIN(0, 0);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u8Imm);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        /* the trailing '1' tells the addressing code one immediate byte follows */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_STORE_MEM_U8(pIemCpu->iEffSeg, GCPtrEffDst, u8Imm);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
12615
12616
/** Opcode 0xc7 - Group 11: mov Ev,Iz (only the /0 encoding is defined).
 *  In 64-bit mode the immediate is 32-bit and gets sign-extended to 64 bits. */
FNIEMOP_DEF(iemOp_Grp11_Ev_Iz)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Ev,Iz in this group. */
        return IEMOP_RAISE_INVALID_OPCODE();
    IEMOP_MNEMONIC("mov Ev,Iz");

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 0);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 0);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 0);
                /* sign-extended 32-bit immediate, per the Iz operand form */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                /* trailing argument = number of immediate bytes still to fetch */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrEffDst, u32Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* still 4: the immediate stays 32-bit */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffDst, u64Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12697
12698
12699
12700
/** Opcode 0xc8 - enter Iw,Ib: create a stack frame of Iw bytes with Ib
 *  nesting levels. */
FNIEMOP_DEF(iemOp_enter_Iw_Ib)
{
    IEMOP_MNEMONIC("enter Iw,Ib");
    IEMOP_HLP_MIN_186();    /* ENTER first appeared on the 80186 */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEMOP_HLP_NO_LOCK_PREFIX();
    uint16_t cbFrame;        IEM_OPCODE_GET_NEXT_U16(&cbFrame);
    uint8_t  u8NestingLevel; IEM_OPCODE_GET_NEXT_U8(&u8NestingLevel);
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_enter, pIemCpu->enmEffOpSize, cbFrame, u8NestingLevel);
}
12712
12713
12714/** Opcode 0xc9. */
12715FNIEMOP_DEF(iemOp_leave)
12716{
12717 IEMOP_MNEMONIC("retn");
12718 IEMOP_HLP_MIN_186();
12719 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
12720 IEMOP_HLP_NO_LOCK_PREFIX();
12721 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_leave, pIemCpu->enmEffOpSize);
12722}
12723
12724
/** Opcode 0xca - retf Iw: far return, popping Iw extra bytes off the stack. */
FNIEMOP_DEF(iemOp_retf_Iw)
{
    IEMOP_MNEMONIC("retf Iw");
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pIemCpu->enmEffOpSize, u16Imm);
}
12734
12735
/** Opcode 0xcb - retf: far return (same CIMPL as 0xca with a zero pop count). */
FNIEMOP_DEF(iemOp_retf)
{
    IEMOP_MNEMONIC("retf");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pIemCpu->enmEffOpSize, 0);
}
12744
12745
/** Opcode 0xcc - int3: breakpoint trap (vector 3, flagged as the one-byte
 *  BP instruction for the CIMPL). */
FNIEMOP_DEF(iemOp_int_3)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_BP, true /*fIsBpInstr*/);
}
12752
12753
/** Opcode 0xcd - int Ib: software interrupt with the vector given by the
 *  immediate byte. */
FNIEMOP_DEF(iemOp_int_Ib)
{
    uint8_t u8Int; IEM_OPCODE_GET_NEXT_U8(&u8Int);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, u8Int, false /*fIsBpInstr*/);
}
12761
12762
/** Opcode 0xce - into: overflow trap.  Passes vector X86_XCPT_OF to
 *  iemCImpl_int unconditionally -- NOTE(review): the EFLAGS.OF check is
 *  presumably done inside the C implementation; confirm there. */
FNIEMOP_DEF(iemOp_into)
{
    IEMOP_MNEMONIC("into");
    IEMOP_HLP_NO_64BIT();   /* INTO is invalid in 64-bit mode */

    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG_CONST(uint8_t,   u8Int,      /*=*/ X86_XCPT_OF,  0);
    IEM_MC_ARG_CONST(bool,      fIsBpInstr, /*=*/ false,        1);
    IEM_MC_CALL_CIMPL_2(iemCImpl_int, u8Int, fIsBpInstr);
    IEM_MC_END();
    return VINF_SUCCESS;
}
12776
12777
/** Opcode 0xcf - iret: interrupt return, deferred entirely to the C
 *  implementation. */
FNIEMOP_DEF(iemOp_iret)
{
    IEMOP_MNEMONIC("iret");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_iret, pIemCpu->enmEffOpSize);
}
12785
12786
/** Opcode 0xd0 - Group 2 Eb,1: rol/ror/rcl/rcr/shl/shr/sar on a byte with a
 *  fixed shift count of one (ModR/M reg selects the operation). */
FNIEMOP_DEF(iemOp_Grp2_Eb_1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Eb,1"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Eb,1"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Eb,1"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Eb,1"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Eb,1"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Eb,1"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Eb,1"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();   /* /6 is undefined in group 2 */
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,   pu8Dst,        0);
        IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=*/1, 1);
        IEM_MC_ARG(uint32_t *,  pEFlags,       2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,   pu8Dst,           0);
        IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=*/1, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,  2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);   /* no immediate follows */
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
12842
12843
12844
12845/** Opcode 0xd1. */
12846FNIEMOP_DEF(iemOp_Grp2_Ev_1)
12847{
12848 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12849 PCIEMOPSHIFTSIZES pImpl;
12850 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
12851 {
12852 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Ev,1"); break;
12853 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Ev,1"); break;
12854 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Ev,1"); break;
12855 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Ev,1"); break;
12856 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Ev,1"); break;
12857 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Ev,1"); break;
12858 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Ev,1"); break;
12859 case 6: return IEMOP_RAISE_INVALID_OPCODE();
12860 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
12861 }
12862 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
12863
12864 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
12865 {
12866 /* register */
12867 IEMOP_HLP_NO_LOCK_PREFIX();
12868 switch (pIemCpu->enmEffOpSize)
12869 {
12870 case IEMMODE_16BIT:
12871 IEM_MC_BEGIN(3, 0);
12872 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
12873 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
12874 IEM_MC_ARG(uint32_t *, pEFlags, 2);
12875 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
12876 IEM_MC_REF_EFLAGS(pEFlags);
12877 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
12878 IEM_MC_ADVANCE_RIP();
12879 IEM_MC_END();
12880 return VINF_SUCCESS;
12881
12882 case IEMMODE_32BIT:
12883 IEM_MC_BEGIN(3, 0);
12884 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
12885 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
12886 IEM_MC_ARG(uint32_t *, pEFlags, 2);
12887 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
12888 IEM_MC_REF_EFLAGS(pEFlags);
12889 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
12890 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
12891 IEM_MC_ADVANCE_RIP();
12892 IEM_MC_END();
12893 return VINF_SUCCESS;
12894
12895 case IEMMODE_64BIT:
12896 IEM_MC_BEGIN(3, 0);
12897 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
12898 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
12899 IEM_MC_ARG(uint32_t *, pEFlags, 2);
12900 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
12901 IEM_MC_REF_EFLAGS(pEFlags);
12902 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
12903 IEM_MC_ADVANCE_RIP();
12904 IEM_MC_END();
12905 return VINF_SUCCESS;
12906
12907 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12908 }
12909 }
12910 else
12911 {
12912 /* memory */
12913 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
12914 switch (pIemCpu->enmEffOpSize)
12915 {
12916 case IEMMODE_16BIT:
12917 IEM_MC_BEGIN(3, 2);
12918 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
12919 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
12920 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
12921 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12922
12923 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12924 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
12925 IEM_MC_FETCH_EFLAGS(EFlags);
12926 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
12927
12928 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
12929 IEM_MC_COMMIT_EFLAGS(EFlags);
12930 IEM_MC_ADVANCE_RIP();
12931 IEM_MC_END();
12932 return VINF_SUCCESS;
12933
12934 case IEMMODE_32BIT:
12935 IEM_MC_BEGIN(3, 2);
12936 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
12937 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
12938 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
12939 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12940
12941 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12942 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
12943 IEM_MC_FETCH_EFLAGS(EFlags);
12944 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
12945
12946 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
12947 IEM_MC_COMMIT_EFLAGS(EFlags);
12948 IEM_MC_ADVANCE_RIP();
12949 IEM_MC_END();
12950 return VINF_SUCCESS;
12951
12952 case IEMMODE_64BIT:
12953 IEM_MC_BEGIN(3, 2);
12954 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
12955 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
12956 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
12957 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12958
12959 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12960 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
12961 IEM_MC_FETCH_EFLAGS(EFlags);
12962 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
12963
12964 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
12965 IEM_MC_COMMIT_EFLAGS(EFlags);
12966 IEM_MC_ADVANCE_RIP();
12967 IEM_MC_END();
12968 return VINF_SUCCESS;
12969
12970 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12971 }
12972 }
12973}
12974
12975
/** Opcode 0xd2 - Group 2: rotate/shift r/m8 by the CL register (rol/ror/rcl/rcr/shl/shr/sar Eb,CL). */
FNIEMOP_DEF(iemOp_Grp2_Eb_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* The ModR/M reg field selects which of the group-2 operations to perform. */
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Eb,CL"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Eb,CL"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Eb,CL"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Eb,CL"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Eb,CL"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Eb,CL"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Eb,CL"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE(); /* /6 is undefined in group 2 -> #UD. */
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc, grr. */
    }
    /* OF and AF are left undefined by these instructions; tell the verifier so. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register operand: shift the byte register in place. */
        IEMOP_HLP_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,   pu8Dst,    0);
        IEM_MC_ARG(uint8_t,     cShiftArg, 1);
        IEM_MC_ARG(uint32_t *,  pEFlags,   2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* shift count comes from CL. */
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory operand: map the byte read/write and shift it in place. */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,   pu8Dst,          0);
        IEM_MC_ARG(uint8_t,     cShiftArg,       1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* shift count comes from CL. */
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
13033
13034
/** Opcode 0xd3 - Group 2: rotate/shift r/m16, r/m32 or r/m64 by the CL register. */
FNIEMOP_DEF(iemOp_Grp2_Ev_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* The ModR/M reg field selects which of the group-2 operations to perform. */
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Ev,CL"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Ev,CL"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Ev,CL"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Ev,CL"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Ev,CL"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Ev,CL"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Ev,CL"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE(); /* /6 is undefined in group 2 -> #UD. */
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* keep gcc quiet about uninitialized pImpl. */
    }
    /* OF and AF are left undefined by these instructions; tell the verifier so. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register operand: dispatch on the effective operand size. */
        IEMOP_HLP_NO_LOCK_PREFIX();
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,  pu16Dst,   0);
                IEM_MC_ARG(uint8_t,     cShiftArg, 1);
                IEM_MC_ARG(uint32_t *,  pEFlags,   2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* shift count comes from CL. */
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,  pu32Dst,   0);
                IEM_MC_ARG(uint8_t,     cShiftArg, 1);
                IEM_MC_ARG(uint32_t *,  pEFlags,   2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* shift count comes from CL. */
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit register writes zero the upper half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,  pu64Dst,   0);
                IEM_MC_ARG(uint8_t,     cShiftArg, 1);
                IEM_MC_ARG(uint32_t *,  pEFlags,   2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* shift count comes from CL. */
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory operand: map read/write and shift in place, per operand size. */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,  pu16Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* shift count comes from CL. */
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,  pu32Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* shift count comes from CL. */
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,  pu64Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* shift count comes from CL. */
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
13170
/** Opcode 0xd4 - AAM Ib (ASCII adjust AX after multiply; the immediate is the
 *  divisor, normally 10).  Invalid in 64-bit mode; a zero immediate raises \#DE. */
FNIEMOP_DEF(iemOp_aam_Ib)
{
    IEMOP_MNEMONIC("aam Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    if (!bImm)
        return IEMOP_RAISE_DIVIDE_ERROR(); /* AAM divides AL by the immediate. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aam, bImm);
}
13182
13183
/** Opcode 0xd5 - AAD Ib (ASCII adjust AX before division; the immediate is the
 *  radix, normally 10).  Invalid in 64-bit mode. */
FNIEMOP_DEF(iemOp_aad_Ib)
{
    IEMOP_MNEMONIC("aad Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aad, bImm);
}
13193
13194
13195/** Opcode 0xd6. */
13196FNIEMOP_DEF(iemOp_salc)
13197{
13198 IEMOP_MNEMONIC("salc");
13199 IEMOP_HLP_MIN_286(); /* (undocument at the time) */
13200 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
13201 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13202 IEMOP_HLP_NO_64BIT();
13203
13204 IEM_MC_BEGIN(0, 0);
13205 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
13206 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0xff);
13207 } IEM_MC_ELSE() {
13208 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0x00);
13209 } IEM_MC_ENDIF();
13210 IEM_MC_ADVANCE_RIP();
13211 IEM_MC_END();
13212 return VINF_SUCCESS;
13213}
13214
13215
/** Opcode 0xd7 - XLAT: AL = [eff-seg:(r/e)BX + unsigned AL], per effective
 *  address size. */
FNIEMOP_DEF(iemOp_xlat)
{
    IEMOP_MNEMONIC("xlat");
    IEMOP_HLP_NO_LOCK_PREFIX();
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint16_t, u16Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U16(u16Addr, X86_GREG_xAX); /* zero-extended AL is the table index. */
            IEM_MC_ADD_GREG_U16_TO_LOCAL(u16Addr, X86_GREG_xBX); /* BX is the table base. */
            IEM_MC_FETCH_MEM16_U8(u8Tmp, pIemCpu->iEffSeg, u16Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint32_t, u32Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U32(u32Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U32_TO_LOCAL(u32Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM32_U8(u8Tmp, pIemCpu->iEffSeg, u32Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint64_t, u64Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U64(u64Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U64_TO_LOCAL(u64Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM_U8(u8Tmp, pIemCpu->iEffSeg, u64Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
13262
13263
/**
 * Common worker for FPU instructions working on ST0 and STn, and storing the
 * result in ST0 (fadd/fmul/fsub/fsubr/fdiv/fdivr st0,stN).
 *
 * Underflows the FPU stack (without computing) if either register is empty.
 *
 * @param bRm The ModR/M byte; the rm field selects STn.
 * @param pfnAImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0); /* result always goes to ST0. */
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13294
13295
/**
 * Common worker for FPU instructions working on ST0 and STn, and only affecting
 * flags (FSW) - e.g. fcom st0,stN.  No FPU register is written.
 *
 * @param bRm The ModR/M byte; the rm field selects STn.
 * @param pfnAImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW(u16Fsw); /* only the status word is updated. */
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX); /* UINT8_MAX = no destination register. */
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13326
13327
/**
 * Common worker for FPU instructions working on ST0 and STn, only affecting
 * flags (FSW), and popping the stack when done - e.g. fcomp st0,stN.
 *
 * @param bRm The ModR/M byte; the rm field selects STn.
 * @param pfnAImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw); /* stack is popped even on compare. */
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(UINT8_MAX); /* UINT8_MAX = no destination register. */
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13358
13359
/** Opcode 0xd8 11/0 - FADD ST0,STn: ST0 += STn. */
FNIEMOP_DEF_1(iemOp_fadd_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fadd st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fadd_r80_by_r80);
}
13366
13367
/** Opcode 0xd8 11/1 - FMUL ST0,STn: ST0 *= STn. */
FNIEMOP_DEF_1(iemOp_fmul_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fmul st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fmul_r80_by_r80);
}
13374
13375
/** Opcode 0xd8 11/2 - FCOM ST0,STn: compare, setting FSW condition codes only. */
FNIEMOP_DEF_1(iemOp_fcom_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fcom_r80_by_r80);
}
13382
13383
/** Opcode 0xd8 11/3 - FCOMP ST0,STn: compare like FCOM, then pop the stack
 *  (shares the fcom assembly worker). */
FNIEMOP_DEF_1(iemOp_fcomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fcom_r80_by_r80);
}
13390
13391
/** Opcode 0xd8 11/4 - FSUB ST0,STn: ST0 -= STn. */
FNIEMOP_DEF_1(iemOp_fsub_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsub st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsub_r80_by_r80);
}
13398
13399
/** Opcode 0xd8 11/5 - FSUBR ST0,STn: ST0 = STn - ST0 (reversed operands). */
FNIEMOP_DEF_1(iemOp_fsubr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsubr_r80_by_r80);
}
13406
13407
/** Opcode 0xd8 11/6 - FDIV ST0,STn: ST0 /= STn. */
FNIEMOP_DEF_1(iemOp_fdiv_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdiv st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdiv_r80_by_r80);
}
13414
13415
/** Opcode 0xd8 11/7 - FDIVR ST0,STn: ST0 = STn / ST0 (reversed operands). */
FNIEMOP_DEF_1(iemOp_fdivr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdivr_r80_by_r80);
}
13422
13423
/**
 * Common worker for FPU instructions working on ST0 and an m32r (32-bit real
 * in memory), storing the result in ST0.
 *
 * @param bRm The ModR/M byte (memory form) used for effective address calc.
 * @param pfnAImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32r, uint8_t, bRm, PFNIEMAIMPLFPUR32, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U,            r32Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U,  pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pIemCpu->iEffSeg, GCPtrEffSrc); /* fetch the second operand from memory. */

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr32Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0); /* result always goes to ST0. */
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13459
13460
/** Opcode 0xd8 !11/0 - FADD ST0,m32real: ST0 += [mem32]. */
FNIEMOP_DEF_1(iemOp_fadd_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fadd st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fadd_r80_by_r32);
}
13467
13468
/** Opcode 0xd8 !11/1 - FMUL ST0,m32real: ST0 *= [mem32]. */
FNIEMOP_DEF_1(iemOp_fmul_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fmul st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fmul_r80_by_r32);
}
13475
13476
/** Opcode 0xd8 !11/2 - FCOM ST0,m32real: compare ST0 with a 32-bit real from
 *  memory, updating only the FSW condition codes. */
FNIEMOP_DEF_1(iemOp_fcom_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcom st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U,            r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U,  pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc); /* also records FPU data pointer. */
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13509
13510
/** Opcode 0xd8 !11/3 - FCOMP ST0,m32real: like FCOM m32real, then pop the
 *  stack. */
FNIEMOP_DEF_1(iemOp_fcomp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcomp st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U,            r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U,  pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc); /* pop after compare. */
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13543
13544
/** Opcode 0xd8 !11/4 - FSUB ST0,m32real: ST0 -= [mem32]. */
FNIEMOP_DEF_1(iemOp_fsub_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsub st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsub_r80_by_r32);
}
13551
13552
/** Opcode 0xd8 !11/5 - FSUBR ST0,m32real: ST0 = [mem32] - ST0 (reversed). */
FNIEMOP_DEF_1(iemOp_fsubr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsubr_r80_by_r32);
}
13559
13560
/** Opcode 0xd8 !11/6 - FDIV ST0,m32real: ST0 /= [mem32]. */
FNIEMOP_DEF_1(iemOp_fdiv_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdiv st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdiv_r80_by_r32);
}
13567
13568
/** Opcode 0xd8 !11/7 - FDIVR ST0,m32real: ST0 = [mem32] / ST0 (reversed). */
FNIEMOP_DEF_1(iemOp_fdivr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdivr_r80_by_r32);
}
13575
13576
/** Opcode 0xd8 - first FPU escape byte.  Dispatches on mod (register vs memory
 *  form) and the reg field of the ModR/M byte. */
FNIEMOP_DEF(iemOp_EscF0)
{
    /* Remember where the FPU opcode starts so FOP can be updated later. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register forms: operate on ST0 and STn. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_stN,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_stN,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_stN, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory forms: operate on ST0 and a 32-bit real in memory. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m32r,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m32r,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m32r,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m32r,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m32r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m32r,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m32r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
13614
13615
/** Opcode 0xd9 /0 mem32real - FLD m32real: push a 32-bit real from memory onto
 *  the FPU stack (converted to 80-bit).
 * @sa iemOp_fld_m64r */
FNIEMOP_DEF_1(iemOp_fld_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fld m32r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U,            r32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U,  pr32Val, r32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7) /* ST7 is the stack slot the push will write into. */
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r32_to_r80, pFpuRes, pr32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13648
13649
/** Opcode 0xd9 !11/2 mem32real - FST m32real: store ST0 to memory as a 32-bit
 *  real (no pop). */
FNIEMOP_DEF_1(iemOp_fst_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fst m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U,             pr32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST0: if the invalid-operation exception is masked, write the
           indefinite QNaN; either way report the stack underflow. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13684
13685
/** Opcode 0xd9 !11/3 - FSTP m32real: store ST0 to memory as a 32-bit real and
 *  pop the stack. */
FNIEMOP_DEF_1(iemOp_fstp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fstp m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U,             pr32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst); /* pop after store. */
    IEM_MC_ELSE()
        /* Empty ST0: if the invalid-operation exception is masked, write the
           indefinite QNaN; either way report the stack underflow (and pop). */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13720
13721
/** Opcode 0xd9 !11/4 - FLDENV m14/28byte: load the FPU environment from memory
 *  (size depends on the effective operand size); deferred to a C worker. */
FNIEMOP_DEF_1(iemOp_fldenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fldenv m14/28byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE,           enmEffOpSize, /*=*/ pIemCpu->enmEffOpSize,  0);
    IEM_MC_ARG(uint8_t,                 iEffSeg,                                    1);
    IEM_MC_ARG(RTGCPTR,                 GCPtrEffSrc,                                2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fldenv, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
13739
13740
13741/** Opcode 0xd9 !11/5 */
13742FNIEMOP_DEF_1(iemOp_fldcw, uint8_t, bRm)
13743{
13744 IEMOP_MNEMONIC("fldcw m2byte");
13745 IEM_MC_BEGIN(1, 1);
13746 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13747 IEM_MC_ARG(uint16_t, u16Fsw, 0);
13748 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13749 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13750 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13751 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
13752 IEM_MC_FETCH_MEM_U16(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
13753 IEM_MC_CALL_CIMPL_1(iemCImpl_fldcw, u16Fsw);
13754 IEM_MC_END();
13755 return VINF_SUCCESS;
13756}
13757
13758
/** Opcode 0xd9 !11/6 - FNSTENV m14/m28byte: store the FPU environment to memory
 *  (size depends on the effective operand size); deferred to a C worker. */
FNIEMOP_DEF_1(iemOp_fnstenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fstenv m14/m28byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE,           enmEffOpSize, /*=*/ pIemCpu->enmEffOpSize,  0);
    IEM_MC_ARG(uint8_t,                 iEffSeg,                                    1);
    IEM_MC_ARG(RTGCPTR,                 GCPtrEffDst,                                2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fnstenv, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}
13776
13777
/** Opcode 0xd9 !11/7 - FNSTCW m2byte: store the FPU control word to memory. */
FNIEMOP_DEF_1(iemOp_fnstcw, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fnstcw m2byte");
    IEM_MC_BEGIN(2, 0);
    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fcw);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FCW(u16Fcw);
    IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Fcw);
    IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
    return VINF_SUCCESS;
}
13795
13796
/** Opcode 0xd9 0xd0, 0xd9 0xd8-0xdf, ++? - FNOP: FPU no-operation, but still
 *  updates the FPU opcode/IP and may raise \#NM/\#MF. */
FNIEMOP_DEF(iemOp_fnop)
{
    IEMOP_MNEMONIC("fnop");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /** @todo Testcase: looks like FNOP leaves FOP alone but updates FPUIP. Could be
     * intel optimizations. Investigate. */
    IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
    return VINF_SUCCESS;
}
13814
13815
/** Opcode 0xd9 11/0 stN - FLD STn: push a copy of STn onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fld_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fld stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     * indicates that it does. */
    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, bRm & X86_MODRM_RM_MASK) /* source is STn from the rm field. */
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_PUSH_FPU_RESULT(FpuRes);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW(); /* loading from an empty register underflows. */
    IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
13843
13844
/** Opcode 0xd9 11/3 stN - FXCH STn: exchange the contents of ST0 and STn. */
FNIEMOP_DEF_1(iemOp_fxch_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fxch stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     * indicates that it does. */
    IEM_MC_BEGIN(1, 3);
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value1);
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value2);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_ARG_CONST(uint8_t,           iStReg, /*=*/ bRm & X86_MODRM_RM_MASK, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        /* Swap: STn's value goes to ST0 (with C1 set), ST0's value to STn. */
        IEM_MC_SET_FPU_RESULT(FpuRes, X86_FSW_C1, pr80Value2);
        IEM_MC_STORE_FPUREG_R80_SRC_REF(bRm & X86_MODRM_RM_MASK, pr80Value1);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_CALL_CIMPL_1(iemCImpl_fxch_underflow, iStReg); /* empty-register case handled in C. */
    IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
13875
13876
/**
 * Opcode 0xd9 11/4, 0xdd 11/2.
 *
 * FSTP ST(i): copies ST(0) into ST(i) and pops the stack.  ST(i) == ST(0)
 * gets a dedicated fast path that merely pops (the 'ffreep st0' idiom).
 *
 * @param   bRm     The ModR/M byte; the low three bits select the
 *                  destination register.
 */
FNIEMOP_DEF_1(iemOp_fstp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fstp st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* fstp st0, st0 is frequently used as an official 'ffreep st0' sequence. */
    uint8_t const iDstReg = bRm & X86_MODRM_RM_MASK;
    if (!iDstReg)
    {
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL_CONST(uint16_t, u16Fsw, /*=*/ 0);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY(0)
            IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
        IEM_MC_ELSE()
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(0);
        IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
        IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
            IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
            IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, iDstReg);
        IEM_MC_ELSE()
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(iDstReg);
        IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
13923
13924
/**
 * Common worker for FPU instructions working on ST0 and replaces it with the
 * result, i.e. unary operators.
 *
 * Raises \#NM/\#MF as appropriate; on an empty ST(0) it takes the stack
 * underflow path instead of calling the implementation.
 *
 * @param pfnAImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpu_st0, PFNIEMAIMPLFPUR80UNARY, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(2, 1);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuRes, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13954
13955
/** Opcode 0xd9 0xe0.  FCHS: unary op replacing ST(0) with its result. */
FNIEMOP_DEF(iemOp_fchs)
{
    IEMOP_MNEMONIC("fchs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fchs_r80);
}
13962
13963
/** Opcode 0xd9 0xe1.  FABS: unary op replacing ST(0) with its result. */
FNIEMOP_DEF(iemOp_fabs)
{
    IEMOP_MNEMONIC("fabs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fabs_r80);
}
13970
13971
/**
 * Common worker for FPU instructions working on ST0 and only returns FSW.
 *
 * ST(0) is read but not modified; only the FPU status word is updated.
 * An empty ST(0) yields the generic (UINT8_MAX) stack underflow path.
 *
 * @param pfnAImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0, PFNIEMAIMPLFPUR80UNARYFSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(2, 1);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pu16Fsw, pr80Value);
        IEM_MC_UPDATE_FSW(u16Fsw);
    IEM_MC_ELSE()
        /* UINT8_MAX = no specific destination register for the underflow. */
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14000
14001
/** Opcode 0xd9 0xe4.  FTST: examines ST(0), updates FSW only. */
FNIEMOP_DEF(iemOp_ftst)
{
    IEMOP_MNEMONIC("ftst st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_ftst_r80);
}
14008
14009
/** Opcode 0xd9 0xe5.  FXAM: classifies ST(0), updates FSW only. */
FNIEMOP_DEF(iemOp_fxam)
{
    IEMOP_MNEMONIC("fxam st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_fxam_r80);
}
14016
14017
/**
 * Common worker for FPU instructions pushing a constant onto the FPU stack.
 *
 * The push target is checked via register 7 (the slot that becomes the new
 * top); if it is occupied, the push-overflow path is taken instead.
 *
 * @param pfnAImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuPushConstant, PFNIEMAIMPLFPUR80LDCONST, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(1, 1);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_1(pfnAImpl, pFpuRes);
        IEM_MC_PUSH_FPU_RESULT(FpuRes);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW();
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14045
14046
/** Opcode 0xd9 0xe8.  FLD1: pushes the constant +1.0. */
FNIEMOP_DEF(iemOp_fld1)
{
    IEMOP_MNEMONIC("fld1");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fld1);
}
14053
14054
/** Opcode 0xd9 0xe9.  FLDL2T: pushes the constant log2(10). */
FNIEMOP_DEF(iemOp_fldl2t)
{
    IEMOP_MNEMONIC("fldl2t");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2t);
}
14061
14062
/** Opcode 0xd9 0xea.  FLDL2E: pushes the constant log2(e). */
FNIEMOP_DEF(iemOp_fldl2e)
{
    IEMOP_MNEMONIC("fldl2e");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2e);
}
14069
/** Opcode 0xd9 0xeb.  FLDPI: pushes the constant pi. */
FNIEMOP_DEF(iemOp_fldpi)
{
    IEMOP_MNEMONIC("fldpi");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldpi);
}
14076
14077
/** Opcode 0xd9 0xec.  FLDLG2: pushes the constant log10(2). */
FNIEMOP_DEF(iemOp_fldlg2)
{
    IEMOP_MNEMONIC("fldlg2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldlg2);
}
14084
/** Opcode 0xd9 0xed.  FLDLN2: pushes the constant ln(2). */
FNIEMOP_DEF(iemOp_fldln2)
{
    IEMOP_MNEMONIC("fldln2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldln2);
}
14091
14092
/** Opcode 0xd9 0xee.  FLDZ: pushes the constant +0.0. */
FNIEMOP_DEF(iemOp_fldz)
{
    IEMOP_MNEMONIC("fldz");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldz);
}
14099
14100
/** Opcode 0xd9 0xf0.  F2XM1: unary op replacing ST(0) with its result. */
FNIEMOP_DEF(iemOp_f2xm1)
{
    IEMOP_MNEMONIC("f2xm1 st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_f2xm1_r80);
}
14107
14108
14109/** Opcode 0xd9 0xf1. */
14110FNIEMOP_DEF(iemOp_fylx2)
14111{
14112 IEMOP_MNEMONIC("fylx2 st0");
14113 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fyl2x_r80);
14114}
14115
14116
/**
 * Common worker for FPU instructions working on ST0 and having two outputs, one
 * replacing ST0 and one pushed onto the stack.
 *
 * An empty ST(0) takes the combined push-underflow path for both results.
 *
 * @param pfnAImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuReplace_st0_push, PFNIEMAIMPLFPUR80UNARYTWO, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(2, 1);
    IEM_MC_LOCAL(IEMFPURESULTTWO, FpuResTwo);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULTTWO, pFpuResTwo, FpuResTwo, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuResTwo, pr80Value);
        IEM_MC_PUSH_FPU_RESULT_TWO(FpuResTwo);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW_TWO();
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14146
14147
/** Opcode 0xd9 0xf2.  FPTAN: replaces ST(0) and pushes a second result. */
FNIEMOP_DEF(iemOp_fptan)
{
    IEMOP_MNEMONIC("fptan st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fptan_r80_r80);
}
14154
14155
/**
 * Common worker for FPU instructions working on STn and ST0, storing the result
 * in STn, and popping the stack unless IE, DE or ZE was raised.
 *
 * @param bRm       The ModR/M byte; the low three bits select ST(n).
 * @param pfnAImpl  Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* First operand is ST(n) (the destination), second is ST(0). */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, bRm & X86_MODRM_RM_MASK);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(bRm & X86_MODRM_RM_MASK);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14187
14188
/** Opcode 0xd9 0xf3.  FPATAN: result to ST(1), then pop. */
FNIEMOP_DEF(iemOp_fpatan)
{
    IEMOP_MNEMONIC("fpatan st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fpatan_r80_by_r80);
}
14195
14196
/** Opcode 0xd9 0xf4.  FXTRACT: replaces ST(0) and pushes a second result. */
FNIEMOP_DEF(iemOp_fxtract)
{
    IEMOP_MNEMONIC("fxtract st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fxtract_r80_r80);
}
14203
14204
/** Opcode 0xd9 0xf5.  FPREM1: ST(0) by ST(1), result to ST(0). */
FNIEMOP_DEF(iemOp_fprem1)
{
    IEMOP_MNEMONIC("fprem1 st0, st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem1_r80_by_r80);
}
14211
14212
/**
 * Opcode 0xd9 0xf6.
 *
 * FDECSTP: decrements the FPU stack top pointer; no register content changes.
 */
FNIEMOP_DEF(iemOp_fdecstp)
{
    IEMOP_MNEMONIC("fdecstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     * FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_DEC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0);

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
14235
14236
/**
 * Opcode 0xd9 0xf7.
 *
 * FINCSTP: increments the FPU stack top pointer; no register content changes.
 */
FNIEMOP_DEF(iemOp_fincstp)
{
    IEMOP_MNEMONIC("fincstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     * FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0);

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
14259
14260
/** Opcode 0xd9 0xf8.  FPREM: ST(0) by ST(1), result to ST(0). */
FNIEMOP_DEF(iemOp_fprem)
{
    IEMOP_MNEMONIC("fprem st0, st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem_r80_by_r80);
}
14267
14268
/** Opcode 0xd9 0xf9.  FYL2XP1: result to ST(1), then pop. */
FNIEMOP_DEF(iemOp_fyl2xp1)
{
    IEMOP_MNEMONIC("fyl2xp1 st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2xp1_r80_by_r80);
}
14275
14276
/** Opcode 0xd9 0xfa.  FSQRT: unary op replacing ST(0) with its result. */
FNIEMOP_DEF(iemOp_fsqrt)
{
    IEMOP_MNEMONIC("fsqrt st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsqrt_r80);
}
14283
14284
/** Opcode 0xd9 0xfb.  FSINCOS: replaces ST(0) and pushes a second result. */
FNIEMOP_DEF(iemOp_fsincos)
{
    IEMOP_MNEMONIC("fsincos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fsincos_r80_r80);
}
14291
14292
/** Opcode 0xd9 0xfc.  FRNDINT: unary op replacing ST(0) with its result. */
FNIEMOP_DEF(iemOp_frndint)
{
    IEMOP_MNEMONIC("frndint st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_frndint_r80);
}
14299
14300
/** Opcode 0xd9 0xfd.  FSCALE: ST(0) by ST(1), result to ST(0). */
FNIEMOP_DEF(iemOp_fscale)
{
    IEMOP_MNEMONIC("fscale st0, st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fscale_r80_by_r80);
}
14307
14308
/** Opcode 0xd9 0xfe.  FSIN: unary op replacing ST(0) with its result. */
FNIEMOP_DEF(iemOp_fsin)
{
    IEMOP_MNEMONIC("fsin st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsin_r80);
}
14315
14316
/** Opcode 0xd9 0xff.  FCOS: unary op replacing ST(0) with its result. */
FNIEMOP_DEF(iemOp_fcos)
{
    IEMOP_MNEMONIC("fcos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fcos_r80);
}
14323
14324
/** Used by iemOp_EscF1 to dispatch the 0xd9 0xe0-0xff encodings.
 *  Index is (second opcode byte - 0xe0); invalid encodings map to
 *  iemOp_Invalid, so no entry is NULL. */
static const PFNIEMOP g_apfnEscF1_E0toFF[32] =
{
    /* 0xe0 */  iemOp_fchs,
    /* 0xe1 */  iemOp_fabs,
    /* 0xe2 */  iemOp_Invalid,
    /* 0xe3 */  iemOp_Invalid,
    /* 0xe4 */  iemOp_ftst,
    /* 0xe5 */  iemOp_fxam,
    /* 0xe6 */  iemOp_Invalid,
    /* 0xe7 */  iemOp_Invalid,
    /* 0xe8 */  iemOp_fld1,
    /* 0xe9 */  iemOp_fldl2t,
    /* 0xea */  iemOp_fldl2e,
    /* 0xeb */  iemOp_fldpi,
    /* 0xec */  iemOp_fldlg2,
    /* 0xed */  iemOp_fldln2,
    /* 0xee */  iemOp_fldz,
    /* 0xef */  iemOp_Invalid,
    /* 0xf0 */  iemOp_f2xm1,
    /* 0xf1 */  iemOp_fylx2,
    /* 0xf2 */  iemOp_fptan,
    /* 0xf3 */  iemOp_fpatan,
    /* 0xf4 */  iemOp_fxtract,
    /* 0xf5 */  iemOp_fprem1,
    /* 0xf6 */  iemOp_fdecstp,
    /* 0xf7 */  iemOp_fincstp,
    /* 0xf8 */  iemOp_fprem,
    /* 0xf9 */  iemOp_fyl2xp1,
    /* 0xfa */  iemOp_fsqrt,
    /* 0xfb */  iemOp_fsincos,
    /* 0xfc */  iemOp_frndint,
    /* 0xfd */  iemOp_fscale,
    /* 0xfe */  iemOp_fsin,
    /* 0xff */  iemOp_fcos
};
14361
14362
/**
 * Opcode 0xd9.
 *
 * Escape opcode dispatcher: register forms (mod == 3) go by reg field, with
 * 0xe0-0xff routed through g_apfnEscF1_E0toFF; memory forms decode the m32r
 * load/store and environment/control-word instructions.
 */
FNIEMOP_DEF(iemOp_EscF1)
{
    /* Record the offset of the FPU opcode byte for FOP updates. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm);
            case 2:
                if (bRm == 0xd0)
                    return FNIEMOP_CALL(iemOp_fnop);
                return IEMOP_RAISE_INVALID_OPCODE();
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved. Intel behavior seems to be FSTP ST(i) though. */
            case 4:
            case 5:
            case 6:
            case 7:
                Assert((unsigned)bRm - 0xe0U < RT_ELEMENTS(g_apfnEscF1_E0toFF));
                return FNIEMOP_CALL(g_apfnEscF1_E0toFF[bRm - 0xe0]);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m32r, bRm);
            case 1: return IEMOP_RAISE_INVALID_OPCODE();
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m32r, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fldenv, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fldcw, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fnstenv, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstcw, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
14404
14405
/**
 * Opcode 0xda 11/0.
 *
 * FCMOVB: copies ST(n) to ST(0) when CF is set; updates FOP/FPUIP either way;
 * underflows if either register is empty.
 *
 * @param   bRm     The ModR/M byte; the low three bits select ST(n).
 */
FNIEMOP_DEF_1(iemOp_fcmovb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14432
14433
/**
 * Opcode 0xda 11/1.
 *
 * FCMOVE: copies ST(n) to ST(0) when ZF is set; otherwise identical in
 * structure to FCMOVB above.
 *
 * @param   bRm     The ModR/M byte; the low three bits select ST(n).
 */
FNIEMOP_DEF_1(iemOp_fcmove_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmove st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14460
14461
/**
 * Opcode 0xda 11/2.
 *
 * FCMOVBE: copies ST(n) to ST(0) when CF or ZF is set.
 *
 * @param   bRm     The ModR/M byte; the low three bits select ST(n).
 */
FNIEMOP_DEF_1(iemOp_fcmovbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14488
14489
/**
 * Opcode 0xda 11/3.
 *
 * FCMOVU: copies ST(n) to ST(0) when PF is set (the 'unordered' condition).
 *
 * @param   bRm     The ModR/M byte; the low three bits select ST(n).
 */
FNIEMOP_DEF_1(iemOp_fcmovu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovu st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14516
14517
/**
 * Common worker for FPU instructions working on ST0 and STn, only affecting
 * flags, and popping twice when done.
 *
 * Note: the operands are hardcoded to ST(0) and ST(1); the sole user is
 * FUCOMPP below.
 *
 * @param pfnAImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, 1)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW_THEN_POP_POP(u16Fsw);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP_POP();
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14549
14550
/** Opcode 0xda 0xe9.  FUCOMPP: compare ST(0)/ST(1), FSW only, pop twice. */
FNIEMOP_DEF(iemOp_fucompp)
{
    IEMOP_MNEMONIC("fucompp st0,stN");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fucom_r80_by_r80);
}
14557
14558
/**
 * Common worker for FPU instructions working on ST0 and an m32i, and storing
 * the result in ST0.
 *
 * The 32-bit signed integer operand is fetched before the FPU availability
 * checks are complete only in program order; the raise macros come first.
 *
 * @param bRm       The ModR/M byte (memory form), used for effective address
 *                  calculation.
 * @param pfnAImpl  Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32i, uint8_t, bRm, PFNIEMAIMPLFPUI32, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi32Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14594
14595
/** Opcode 0xda !11/0.  FIADD m32i. */
FNIEMOP_DEF_1(iemOp_fiadd_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fiadd m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fiadd_r80_by_i32);
}
14602
14603
/** Opcode 0xda !11/1.  FIMUL m32i. */
FNIEMOP_DEF_1(iemOp_fimul_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fimul m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fimul_r80_by_i32);
}
14610
14611
/**
 * Opcode 0xda !11/2.
 *
 * FICOM m32i: compares ST(0) with a 32-bit signed integer in memory; updates
 * FSW only (no result stored, no pop).
 *
 * @param   bRm     The ModR/M byte (memory form).
 */
FNIEMOP_DEF_1(iemOp_ficom_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ficom st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14644
14645
/**
 * Opcode 0xda !11/3.
 *
 * FICOMP m32i: same comparison as FICOM above but pops the stack afterwards.
 *
 * @param   bRm     The ModR/M byte (memory form).
 */
FNIEMOP_DEF_1(iemOp_ficomp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ficomp st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14678
14679
/** Opcode 0xda !11/4.  FISUB m32i. */
FNIEMOP_DEF_1(iemOp_fisub_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisub m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisub_r80_by_i32);
}
14686
14687
/** Opcode 0xda !11/5.  FISUBR m32i. */
FNIEMOP_DEF_1(iemOp_fisubr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisubr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisubr_r80_by_i32);
}
14694
14695
/** Opcode 0xda !11/6.  FIDIV m32i. */
FNIEMOP_DEF_1(iemOp_fidiv_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fidiv m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidiv_r80_by_i32);
}
14702
14703
/** Opcode 0xda !11/7.  FIDIVR m32i. */
FNIEMOP_DEF_1(iemOp_fidivr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fidivr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidivr_r80_by_i32);
}
14710
14711
/**
 * Opcode 0xda.
 *
 * Escape opcode dispatcher: register forms are the FCMOVxx family plus
 * FUCOMPP (only valid at 0xe9); memory forms are the m32i arithmetic and
 * comparison instructions.
 */
FNIEMOP_DEF(iemOp_EscF2)
{
    /* Record the offset of the FPU opcode byte for FOP updates. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovb_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmove_stN, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovu_stN, bRm);
            case 4: return IEMOP_RAISE_INVALID_OPCODE();
            case 5:
                if (bRm == 0xe9)
                    return FNIEMOP_CALL(iemOp_fucompp);
                return IEMOP_RAISE_INVALID_OPCODE();
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m32i,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m32i,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m32i,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m32i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m32i,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m32i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m32i,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m32i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
14751
14752
/**
 * Opcode 0xdb !11/0.
 *
 * FILD m32i: loads a 32-bit signed integer from memory and pushes it as an
 * 80-bit value; takes the push-overflow path if the target slot is occupied.
 *
 * @param   bRm     The ModR/M byte (memory form).
 */
FNIEMOP_DEF_1(iemOp_fild_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fild m32i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int32_t, i32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val, i32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i32_to_r80, pFpuRes, pi32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14784
14785
/**
 * Opcode 0xdb !11/1.
 *
 * FISTTP m32i: stores ST(0) to memory as a 32-bit signed integer using
 * truncation, then pops.  On an empty ST(0) with IM masked, the integer
 * indefinite value (INT32_MIN) is stored instead.
 *
 * @param   bRm     The ModR/M byte (memory form).
 */
FNIEMOP_DEF_1(iemOp_fisttp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisttp m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14820
14821
/**
 * Opcode 0xdb !11/2.
 *
 * FIST m32i: stores ST(0) to memory as a 32-bit signed integer (no pop).
 * On an empty ST(0) with IM masked, the integer indefinite value
 * (INT32_MIN) is stored instead.
 *
 * @param   bRm     The ModR/M byte (memory form).
 */
FNIEMOP_DEF_1(iemOp_fist_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fist m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14856
14857
14858/** Opcode 0xdb !11/3. */
14859FNIEMOP_DEF_1(iemOp_fistp_m32i, uint8_t, bRm)
14860{
14861 IEMOP_MNEMONIC("fisttp m32i");
14862 IEM_MC_BEGIN(3, 2);
14863 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14864 IEM_MC_LOCAL(uint16_t, u16Fsw);
14865 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14866 IEM_MC_ARG(int32_t *, pi32Dst, 1);
14867 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
14868
14869 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
14870 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14871 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14872 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14873
14874 IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
14875 IEM_MC_PREPARE_FPU_USAGE();
14876 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
14877 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
14878 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
14879 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
14880 IEM_MC_ELSE()
14881 IEM_MC_IF_FCW_IM()
14882 IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
14883 IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
14884 IEM_MC_ENDIF();
14885 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
14886 IEM_MC_ENDIF();
14887 IEM_MC_ADVANCE_RIP();
14888
14889 IEM_MC_END();
14890 return VINF_SUCCESS;
14891}
14892
14893
/** Opcode 0xdb !11/5. */
FNIEMOP_DEF_1(iemOp_fld_m80r, uint8_t, bRm)
{
    /*
     * FLD m80r: push an 80-bit extended-precision value from memory onto the
     * FPU register stack.
     */
    IEMOP_MNEMONIC("fld m80r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT80U, r80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT80U, pr80Val, r80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R80(r80Val, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* The push target is ST(7) relative to the current top (i.e. new ST(0)). */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r80, pFpuRes, pr80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14925
14926
/** Opcode 0xdb !11/7. */
FNIEMOP_DEF_1(iemOp_fstp_m80r, uint8_t, bRm)
{
    /*
     * FSTP m80r: store ST(0) to memory as an 80-bit extended-precision value
     * and pop the register stack.
     */
    IEMOP_MNEMONIC("fstp m80r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT80U, pr80Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing before touching FPU state. */
    IEM_MC_MEM_MAP(pr80Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r80, pu16Fsw, pr80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr80Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: with IM masked, store the real indefinite (QNaN). */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(pr80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr80Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14961
14962
/** Opcode 0xdb 11/0. */
FNIEMOP_DEF_1(iemOp_fcmovnb_stN, uint8_t, bRm)
{
    /*
     * FCMOVNB ST(0),ST(i): copy ST(i) to ST(0) if CF is clear (not below).
     * Only the FPU opcode/IP is updated when the condition is false.
     */
    IEMOP_MNEMONIC("fcmovnb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(i) and ST(0) must be valid; otherwise signal stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14989
14990
/** Opcode 0xdb 11/1. */
FNIEMOP_DEF_1(iemOp_fcmovne_stN, uint8_t, bRm)
{
    /*
     * FCMOVNE ST(0),ST(i): copy ST(i) to ST(0) if ZF is clear (not equal).
     */
    IEMOP_MNEMONIC("fcmovne st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(i) and ST(0) must be valid; otherwise signal stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15017
15018
/** Opcode 0xdb 11/2. */
FNIEMOP_DEF_1(iemOp_fcmovnbe_stN, uint8_t, bRm)
{
    /*
     * FCMOVNBE ST(0),ST(i): copy ST(i) to ST(0) if both CF and ZF are clear
     * (not below or equal).
     */
    IEMOP_MNEMONIC("fcmovnbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(i) and ST(0) must be valid; otherwise signal stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15045
15046
/** Opcode 0xdb 11/3. */
FNIEMOP_DEF_1(iemOp_fcmovnnu_stN, uint8_t, bRm)
{
    /*
     * FCMOVNU ST(0),ST(i): copy ST(i) to ST(0) if PF is clear (not unordered).
     */
    IEMOP_MNEMONIC("fcmovnnu st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(i) and ST(0) must be valid; otherwise signal stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15073
15074
/** Opcode 0xdb 0xe0. */
FNIEMOP_DEF(iemOp_fneni)
{
    /* FNENI: 8087 interrupt-enable instruction; a no-op on later CPUs. */
    IEMOP_MNEMONIC("fneni (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
15086
15087
/** Opcode 0xdb 0xe1. */
FNIEMOP_DEF(iemOp_fndisi)
{
    /* FNDISI: 8087 interrupt-disable instruction; a no-op on later CPUs. */
    IEMOP_MNEMONIC("fndisi (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
15099
15100
/** Opcode 0xdb 0xe2. */
FNIEMOP_DEF(iemOp_fnclex)
{
    /* FNCLEX: clear the FPU exception flags in FSW without checking for
       pending exceptions first. */
    IEMOP_MNEMONIC("fnclex");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_CLEAR_FSW_EX();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
15115
15116
/** Opcode 0xdb 0xe3. */
FNIEMOP_DEF(iemOp_fninit)
{
    /* FNINIT: reinitialize the FPU without checking for pending exceptions;
       deferred to the C implementation. */
    IEMOP_MNEMONIC("fninit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_finit, false /*fCheckXcpts*/);
}
15124
15125
/** Opcode 0xdb 0xe4. */
FNIEMOP_DEF(iemOp_fnsetpm)
{
    IEMOP_MNEMONIC("fnsetpm (80287/ign)"); /* set protected mode on fpu. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
15137
15138
/** Opcode 0xdb 0xe5. */
FNIEMOP_DEF(iemOp_frstpm)
{
    IEMOP_MNEMONIC("frstpm (80287XL/ign)"); /* reset pm, back to real mode. */
#if 0 /* #UDs on newer CPUs */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
#else
    /* Current behavior: raise #UD, matching post-80287XL CPUs. */
    return IEMOP_RAISE_INVALID_OPCODE();
#endif
}
15154
15155
/** Opcode 0xdb 11/5. */
FNIEMOP_DEF_1(iemOp_fucomi_stN, uint8_t, bRm)
{
    /* FUCOMI ST(0),ST(i): unordered compare setting EFLAGS; no pop. */
    IEMOP_MNEMONIC("fucomi st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fucomi_r80_by_r80, false /*fPop*/);
}
15162
15163
/** Opcode 0xdb 11/6. */
FNIEMOP_DEF_1(iemOp_fcomi_stN, uint8_t, bRm)
{
    /* FCOMI ST(0),ST(i): ordered compare setting EFLAGS; no pop. */
    IEMOP_MNEMONIC("fcomi st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, false /*fPop*/);
}
15170
15171
/** Opcode 0xdb. */
FNIEMOP_DEF(iemOp_EscF3)
{
    /*
     * Escape opcode 0xdb dispatcher.  Register forms (mod == 3) select
     * FCMOVcc / control instructions / FUCOMI / FCOMI by the reg field;
     * memory forms select the m32i / m80r load-store group.
     */
    /* Record the FPU opcode offset (the escape byte itself) for FOP. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovnb_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmovne_stN,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovnbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovnnu_stN, bRm);
            case 4:
                /* reg == 4 hosts the fixed-encoding control instructions. */
                switch (bRm)
                {
                    case 0xe0: return FNIEMOP_CALL(iemOp_fneni);
                    case 0xe1: return FNIEMOP_CALL(iemOp_fndisi);
                    case 0xe2: return FNIEMOP_CALL(iemOp_fnclex);
                    case 0xe3: return FNIEMOP_CALL(iemOp_fninit);
                    case 0xe4: return FNIEMOP_CALL(iemOp_fnsetpm);
                    case 0xe5: return FNIEMOP_CALL(iemOp_frstpm);
                    case 0xe6: return IEMOP_RAISE_INVALID_OPCODE();
                    case 0xe7: return IEMOP_RAISE_INVALID_OPCODE();
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reached - every inner case returns */
            case 5: return FNIEMOP_CALL_1(iemOp_fucomi_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fcomi_stN, bRm);
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fild_m32i,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m32i,bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fist_m32i,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fistp_m32i, bRm);
            case 4: return IEMOP_RAISE_INVALID_OPCODE();
            case 5: return FNIEMOP_CALL_1(iemOp_fld_m80r,   bRm);
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return FNIEMOP_CALL_1(iemOp_fstp_m80r,  bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
15221
15222
/**
 * Common worker for FPU instructions working on STn and ST0, and storing the
 * result in STn unless IE, DE or ZE was raised.
 *
 * @param   bRm         The ModR/M byte; the rm field selects ST(i).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Operand order: ST(i) op ST(0), result stored back into ST(i). */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15254
15255
/** Opcode 0xdc 11/0. */
FNIEMOP_DEF_1(iemOp_fadd_stN_st0, uint8_t, bRm)
{
    /* FADD ST(i),ST(0): defer to the common ST(i) <- ST(i) op ST(0) worker. */
    IEMOP_MNEMONIC("fadd stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fadd_r80_by_r80);
}
15262
15263
/** Opcode 0xdc 11/1. */
FNIEMOP_DEF_1(iemOp_fmul_stN_st0, uint8_t, bRm)
{
    /* FMUL ST(i),ST(0): defer to the common ST(i) <- ST(i) op ST(0) worker. */
    IEMOP_MNEMONIC("fmul stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fmul_r80_by_r80);
}
15270
15271
/** Opcode 0xdc 11/4. */
FNIEMOP_DEF_1(iemOp_fsubr_stN_st0, uint8_t, bRm)
{
    /* FSUBR ST(i),ST(0): defer to the common ST(i) <- ST(i) op ST(0) worker. */
    IEMOP_MNEMONIC("fsubr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsubr_r80_by_r80);
}
15278
15279
/** Opcode 0xdc 11/5. */
FNIEMOP_DEF_1(iemOp_fsub_stN_st0, uint8_t, bRm)
{
    /* FSUB ST(i),ST(0): defer to the common ST(i) <- ST(i) op ST(0) worker. */
    IEMOP_MNEMONIC("fsub stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsub_r80_by_r80);
}
15286
15287
/** Opcode 0xdc 11/6. */
FNIEMOP_DEF_1(iemOp_fdivr_stN_st0, uint8_t, bRm)
{
    /* FDIVR ST(i),ST(0): defer to the common ST(i) <- ST(i) op ST(0) worker. */
    IEMOP_MNEMONIC("fdivr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdivr_r80_by_r80);
}
15294
15295
/** Opcode 0xdc 11/7. */
FNIEMOP_DEF_1(iemOp_fdiv_stN_st0, uint8_t, bRm)
{
    /* FDIV ST(i),ST(0): defer to the common ST(i) <- ST(i) op ST(0) worker. */
    IEMOP_MNEMONIC("fdiv stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdiv_r80_by_r80);
}
15302
15303
/**
 * Common worker for FPU instructions working on ST0 and a 64-bit floating point
 * memory operand, and storing the result in ST0.
 *
 * @param   bRm         The ModR/M byte; encodes the memory operand.
 * @param   pfnImpl     Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_ST0_m64r, uint8_t, bRm, PFNIEMAIMPLFPUR64, pfnImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U, r64Factor2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Factor1, 1);
    IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Factor2, r64Factor2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Fetch the memory operand before committing to FPU state changes. */
    IEM_MC_FETCH_MEM_R64(r64Factor2, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Factor1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnImpl, pFpuRes, pr80Factor1, pr64Factor2);
        IEM_MC_STORE_FPU_RESULT_MEM_OP(FpuRes, 0, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(0, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15338
15339
/** Opcode 0xdc !11/0. */
FNIEMOP_DEF_1(iemOp_fadd_m64r, uint8_t, bRm)
{
    /* FADD m64r: ST(0) <- ST(0) + [mem64]. */
    IEMOP_MNEMONIC("fadd m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fadd_r80_by_r64);
}
15346
15347
/** Opcode 0xdc !11/1. */
FNIEMOP_DEF_1(iemOp_fmul_m64r, uint8_t, bRm)
{
    /* FMUL m64r: ST(0) <- ST(0) * [mem64]. */
    IEMOP_MNEMONIC("fmul m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fmul_r80_by_r64);
}
15354
15355
/** Opcode 0xdc !11/2. */
FNIEMOP_DEF_1(iemOp_fcom_m64r, uint8_t, bRm)
{
    /*
     * FCOM m64r: compare ST(0) with a 64-bit real memory operand; only the
     * FSW condition codes are updated - no register is stored.
     */
    IEMOP_MNEMONIC("fcom st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15388
15389
/** Opcode 0xdc !11/3. */
FNIEMOP_DEF_1(iemOp_fcomp_m64r, uint8_t, bRm)
{
    /*
     * FCOMP m64r: like FCOM m64r but pops the register stack afterwards.
     */
    IEMOP_MNEMONIC("fcomp st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15422
15423
/** Opcode 0xdc !11/4. */
FNIEMOP_DEF_1(iemOp_fsub_m64r, uint8_t, bRm)
{
    /* FSUB m64r: ST(0) <- ST(0) - [mem64]. */
    IEMOP_MNEMONIC("fsub m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsub_r80_by_r64);
}
15430
15431
/** Opcode 0xdc !11/5. */
FNIEMOP_DEF_1(iemOp_fsubr_m64r, uint8_t, bRm)
{
    /* FSUBR m64r: ST(0) <- [mem64] - ST(0). */
    IEMOP_MNEMONIC("fsubr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsubr_r80_by_r64);
}
15438
15439
/** Opcode 0xdc !11/6. */
FNIEMOP_DEF_1(iemOp_fdiv_m64r, uint8_t, bRm)
{
    /* FDIV m64r: ST(0) <- ST(0) / [mem64]. */
    IEMOP_MNEMONIC("fdiv m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdiv_r80_by_r64);
}
15446
15447
/** Opcode 0xdc !11/7. */
FNIEMOP_DEF_1(iemOp_fdivr_m64r, uint8_t, bRm)
{
    /* FDIVR m64r: ST(0) <- [mem64] / ST(0). */
    IEMOP_MNEMONIC("fdivr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdivr_r80_by_r64);
}
15454
15455
/** Opcode 0xdc. */
FNIEMOP_DEF(iemOp_EscF4)
{
    /*
     * Escape opcode 0xdc dispatcher.  Register forms operate on
     * ST(i) <- ST(i) op ST(0); memory forms take a 64-bit real operand
     * with ST(0) as the implicit destination.
     */
    /* Record the FPU opcode offset (the escape byte itself) for FOP. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN_st0,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN_st0,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN,      bRm); /* Marked reserved, intel behavior is that of FCOM ST(i). */
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN,     bRm); /* Marked reserved, intel behavior is that of FCOMP ST(i). */
            case 4: return FNIEMOP_CALL_1(iemOp_fsubr_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsub_stN_st0,  bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivr_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdiv_stN_st0,  bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m64r,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m64r,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m64r,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m64r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m64r,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m64r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m64r,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m64r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
15492
15493
/** Opcode 0xdd !11/0.
 * @sa iemOp_fld_m32r */
FNIEMOP_DEF_1(iemOp_fld_m64r, uint8_t, bRm)
{
    /*
     * FLD m64r: convert a 64-bit real memory operand to 80-bit extended
     * precision and push it onto the FPU register stack.
     */
    IEMOP_MNEMONIC("fld m64r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val, r64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Val, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    /* The push target is ST(7) relative to the current top (i.e. new ST(0)). */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r64_to_r80, pFpuRes, pr64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15525
15526
/** Opcode 0xdd !11/1. */
FNIEMOP_DEF_1(iemOp_fisttp_m64i, uint8_t, bRm)
{
    /*
     * FISTTP m64i (SSE3): store ST(0) to memory as a 64-bit signed integer
     * using truncation (ignores FCW rounding control) and pop the stack.
     */
    IEMOP_MNEMONIC("fisttp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing before touching FPU state. */
    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: with IM masked, store the integer indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15561
15562
/** Opcode 0xdd !11/2. */
FNIEMOP_DEF_1(iemOp_fst_m64r, uint8_t, bRm)
{
    /*
     * FST m64r: store ST(0) to memory as a 64-bit real value, rounded per
     * the current FCW.  Does not pop the stack.
     */
    IEMOP_MNEMONIC("fst m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing before touching FPU state. */
    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: with IM masked, store the real indefinite (QNaN). */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15597
15598
15599
15600
/** Opcode 0xdd !11/3. */
FNIEMOP_DEF_1(iemOp_fstp_m64r, uint8_t, bRm)
{
    /*
     * FSTP m64r: store ST(0) to memory as a 64-bit real value (rounded per
     * FCW) and pop the register stack.
     */
    IEMOP_MNEMONIC("fstp m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing before touching FPU state. */
    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: with IM masked, store the real indefinite (QNaN). */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15635
15636
/** Opcode 0xdd !11/4. */
FNIEMOP_DEF_1(iemOp_frstor, uint8_t, bRm)
{
    /*
     * FRSTOR m94/108byte: restore the complete FPU state from memory;
     * layout depends on the effective operand size (94 vs 108 bytes).
     * Deferred to the C implementation.
     */
    IEMOP_MNEMONIC("frstor m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pIemCpu->enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_frstor, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
15654
15655
/** Opcode 0xdd !11/6. */
FNIEMOP_DEF_1(iemOp_fnsave, uint8_t, bRm)
{
    /*
     * FNSAVE m94/108byte: save the complete FPU state to memory (layout
     * depends on effective operand size) without checking for pending
     * exceptions.  Deferred to the C implementation.
     */
    IEMOP_MNEMONIC("fnsave m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pIemCpu->enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fnsave, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;

}
15674
/** Opcode 0xdd !11/7. */
FNIEMOP_DEF_1(iemOp_fnstsw, uint8_t, bRm)
{
    /*
     * FNSTSW m16: store the FPU status word to a 16-bit memory location
     * without checking for pending exceptions.
     */
    IEMOP_MNEMONIC("fnstsw m16");

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Tmp);
    IEM_MC_ADVANCE_RIP();

/** @todo Debug / drop a hint to the verifier that things may differ
 * from REM. Seen 0x4020 (iem) vs 0x4000 (rem) at 0008:801c6b88 booting
 * NT4SP1. (X86_FSW_PE) */
    IEM_MC_END();
    return VINF_SUCCESS;
}
15699
15700
/** Opcode 0xdd 11/0. */
FNIEMOP_DEF_1(iemOp_ffree_stN, uint8_t, bRm)
{
    /* FFREE ST(i): mark register ST(i) as empty in the tag word. */
    IEMOP_MNEMONIC("ffree stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C1, C2 and C3 are documented as undefined, we leave the
       unmodified. */

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
15722
15723
/** Opcode 0xdd 11/2. */
FNIEMOP_DEF_1(iemOp_fst_stN, uint8_t, bRm)
{
    /* FST ST(i): copy ST(0) into register ST(i); no pop. */
    IEMOP_MNEMONIC("fst st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
    IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
15748
15749
15750/** Opcode 0xdd 11/3. */
15751FNIEMOP_DEF_1(iemOp_fucom_stN_st0, uint8_t, bRm)
15752{
15753 IEMOP_MNEMONIC("fcom st0,stN");
15754 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fucom_r80_by_r80);
15755}
15756
15757
15758/** Opcode 0xdd 11/4. */
15759FNIEMOP_DEF_1(iemOp_fucomp_stN, uint8_t, bRm)
15760{
15761 IEMOP_MNEMONIC("fcomp st0,stN");
15762 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fucom_r80_by_r80);
15763}
15764
15765
/** Opcode 0xdd. */
FNIEMOP_DEF(iemOp_EscF5)
{
    /*
     * Escape opcode 0xdd dispatcher.  Register forms cover FFREE / FST /
     * FSTP / FUCOM(P); memory forms cover the m64r load-store group plus
     * FRSTOR, FNSAVE and FNSTSW.
     */
    /* Record the FPU opcode offset (the escape byte itself) for FOP. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffree_stN,   bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN,    bRm); /* Reserved, intel behavior is that of XCHG ST(i). */
            case 2: return FNIEMOP_CALL_1(iemOp_fst_stN,     bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN,    bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fucom_stN_st0,bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fucomp_stN,  bRm);
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m64r,    bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m64i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m64r,    bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m64r,   bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_frstor,      bRm);
            case 5: return IEMOP_RAISE_INVALID_OPCODE();
            case 6: return FNIEMOP_CALL_1(iemOp_fnsave,      bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstsw,      bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
15802
15803
/** Opcode 0xde 11/0. */
FNIEMOP_DEF_1(iemOp_faddp_stN_st0, uint8_t, bRm)
{
    /* FADDP ST(i),ST(0): ST(i) <- ST(i) + ST(0), then pop. */
    IEMOP_MNEMONIC("faddp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fadd_r80_by_r80);
}
15810
15811
/** Opcode 0xde 11/1. */
FNIEMOP_DEF_1(iemOp_fmulp_stN_st0, uint8_t, bRm)
{
    /* FMULP ST(i),ST(0): ST(i) <- ST(i) * ST(0), then pop. */
    IEMOP_MNEMONIC("fmulp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fmul_r80_by_r80);
}
15818
15819
15820/** Opcode 0xde 0xd9. */
15821FNIEMOP_DEF(iemOp_fcompp)
15822{
15823 IEMOP_MNEMONIC("fucompp st0,stN");
15824 return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fcom_r80_by_r80);
15825}
15826
15827
/** Opcode 0xde 11/4. */
FNIEMOP_DEF_1(iemOp_fsubrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubrp stN,st0");
    /* ST(i) = ST(0) - ST(i) (reverse subtract), then pop. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsubr_r80_by_r80);
}
15834
15835
/** Opcode 0xde 11/5. */
FNIEMOP_DEF_1(iemOp_fsubp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubp stN,st0");
    /* ST(i) -= ST(0), then pop. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsub_r80_by_r80);
}
15842
15843
/** Opcode 0xde 11/6. */
FNIEMOP_DEF_1(iemOp_fdivrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivrp stN,st0");
    /* ST(i) = ST(0) / ST(i) (reverse divide), then pop. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdivr_r80_by_r80);
}
15850
15851
/** Opcode 0xde 11/7. */
FNIEMOP_DEF_1(iemOp_fdivp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivp stN,st0");
    /* ST(i) /= ST(0), then pop. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdiv_r80_by_r80);
}
15858
15859
15860/**
15861 * Common worker for FPU instructions working on ST0 and an m16i, and storing
15862 * the result in ST0.
15863 *
15864 * @param pfnAImpl Pointer to the instruction implementation (assembly).
15865 */
15866FNIEMOP_DEF_2(iemOpHlpFpu_st0_m16i, uint8_t, bRm, PFNIEMAIMPLFPUI16, pfnAImpl)
15867{
15868 IEM_MC_BEGIN(3, 3);
15869 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15870 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
15871 IEM_MC_LOCAL(int16_t, i16Val2);
15872 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
15873 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
15874 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);
15875
15876 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15877 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15878
15879 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15880 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15881 IEM_MC_FETCH_MEM_I16(i16Val2, pIemCpu->iEffSeg, GCPtrEffSrc);
15882
15883 IEM_MC_PREPARE_FPU_USAGE();
15884 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
15885 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi16Val2);
15886 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
15887 IEM_MC_ELSE()
15888 IEM_MC_FPU_STACK_UNDERFLOW(0);
15889 IEM_MC_ENDIF();
15890 IEM_MC_ADVANCE_RIP();
15891
15892 IEM_MC_END();
15893 return VINF_SUCCESS;
15894}
15895
15896
/** Opcode 0xde !11/0. */
FNIEMOP_DEF_1(iemOp_fiadd_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fiadd m16i");
    /* ST(0) += (int16 from memory). */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fiadd_r80_by_i16);
}
15903
15904
/** Opcode 0xde !11/1. */
FNIEMOP_DEF_1(iemOp_fimul_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fimul m16i");
    /* ST(0) *= (int16 from memory). */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fimul_r80_by_i16);
}
15911
15912
/** Opcode 0xde !11/2. */
FNIEMOP_DEF_1(iemOp_ficom_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ficom st0,m16i");
    /* Compare ST(0) with a 16-bit memory integer; only FSW is updated (no pop). */

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(int16_t,               i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        /* ST(0) empty: record a stack underflow with the memory operand info. */
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15945
15946
/** Opcode 0xde !11/3. */
FNIEMOP_DEF_1(iemOp_ficomp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ficomp st0,m16i");
    /* Same as FICOM above, but pops ST(0) afterwards (THEN_POP variants). */

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(int16_t,               i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15979
15980
/** Opcode 0xde !11/4. */
FNIEMOP_DEF_1(iemOp_fisub_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisub m16i");
    /* ST(0) -= (int16 from memory). */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisub_r80_by_i16);
}
15987
15988
/** Opcode 0xde !11/5. */
FNIEMOP_DEF_1(iemOp_fisubr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisubr m16i");
    /* ST(0) = (int16 from memory) - ST(0). */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisubr_r80_by_i16);
}
15995
15996
15997/** Opcode 0xde !11/6. */
15998FNIEMOP_DEF_1(iemOp_fidiv_m16i, uint8_t, bRm)
15999{
16000 IEMOP_MNEMONIC("fiadd m16i");
16001 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidiv_r80_by_i16);
16002}
16003
16004
16005/** Opcode 0xde !11/7. */
16006FNIEMOP_DEF_1(iemOp_fidivr_m16i, uint8_t, bRm)
16007{
16008 IEMOP_MNEMONIC("fiadd m16i");
16009 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidivr_r80_by_i16);
16010}
16011
16012
/** Opcode 0xde. */
FNIEMOP_DEF(iemOp_EscF6)
{
    /* Remember where the escape opcode byte (0xde) sits for FPU FOP/FIP updating. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register form: pop variants of the arithmetic instructions. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_faddp_stN_st0, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmulp_stN_st0, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm); /* reserved encoding; dispatched as FCOMP ST(i). */
            case 3: if (bRm == 0xd9)
                        return FNIEMOP_CALL(iemOp_fcompp);
                    return IEMOP_RAISE_INVALID_OPCODE();
            case 4: return FNIEMOP_CALL_1(iemOp_fsubrp_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubp_stN_st0, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivrp_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivp_stN_st0, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory form: 16-bit integer operands. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m16i,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m16i,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m16i,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m16i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m16i,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m16i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m16i,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m16i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
16051
16052
/** Opcode 0xdf 11/0.
 * Undocumented instruction, assumed to work like ffree + fincstp. */
FNIEMOP_DEF_1(iemOp_ffreep_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ffreep stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /* Tag ST(i) as empty, then increment TOP - i.e. pop without storing. */
    IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
16074
16075
/** Opcode 0xdf 0xe0. */
FNIEMOP_DEF(iemOp_fnstsw_ax)
{
    IEMOP_MNEMONIC("fnstsw ax");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* Copy the FPU status word into AX; read-only access, no exception checks
       beyond device-not-available (this is the no-wait form). */
    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
16092
16093
16094/** Opcode 0xdf 11/5. */
16095FNIEMOP_DEF_1(iemOp_fucomip_st0_stN, uint8_t, bRm)
16096{
16097 IEMOP_MNEMONIC("fcomip st0,stN");
16098 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
16099}
16100
16101
/** Opcode 0xdf 11/6. */
FNIEMOP_DEF_1(iemOp_fcomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcomip st0,stN");
    /* Compare ST(0) with ST(i) setting EFLAGS, then pop (fPop = true). */
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
}
16108
16109
/** Opcode 0xdf !11/0. */
FNIEMOP_DEF_1(iemOp_fild_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fild m16i");
    /* Load a 16-bit memory integer, convert to 80-bit real, push onto the stack. */

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(int16_t,               i16Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val,  i16Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Register 7 (TOP-1 after push) must be free, otherwise it's a push overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i16_to_r80, pFpuRes, pi16Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
16141
16142
/** Opcode 0xdf !11/1. */
FNIEMOP_DEF_1(iemOp_fisttp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisttp m16i");
    /* Store ST(0) to memory as int16 with truncation, then pop (SSE3). */
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int16_t *,               pi16Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination writable up front so access faults precede FPU work. */
    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Underflow: with #IE masked, write the integer-indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
16177
16178
16179/** Opcode 0xdf !11/2. */
16180FNIEMOP_DEF_1(iemOp_fist_m16i, uint8_t, bRm)
16181{
16182 IEMOP_MNEMONIC("fistp m16i");
16183 IEM_MC_BEGIN(3, 2);
16184 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
16185 IEM_MC_LOCAL(uint16_t, u16Fsw);
16186 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
16187 IEM_MC_ARG(int16_t *, pi16Dst, 1);
16188 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
16189
16190 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
16191 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16192 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16193 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16194
16195 IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
16196 IEM_MC_PREPARE_FPU_USAGE();
16197 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
16198 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
16199 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
16200 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
16201 IEM_MC_ELSE()
16202 IEM_MC_IF_FCW_IM()
16203 IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
16204 IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
16205 IEM_MC_ENDIF();
16206 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
16207 IEM_MC_ENDIF();
16208 IEM_MC_ADVANCE_RIP();
16209
16210 IEM_MC_END();
16211 return VINF_SUCCESS;
16212}
16213
16214
/** Opcode 0xdf !11/3. */
FNIEMOP_DEF_1(iemOp_fistp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fistp m16i");
    /* Store ST(0) to memory as int16 (rounded per FCW), then pop. */
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int16_t *,               pi16Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Underflow: with #IE masked, write the integer-indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
16249
16250
/** Opcode 0xdf !11/4.
 * FBLD m80bcd - load packed BCD; not implemented yet (stub). */
FNIEMOP_STUB_1(iemOp_fbld_m80d, uint8_t, bRm);
16253
16254
/** Opcode 0xdf !11/5. */
FNIEMOP_DEF_1(iemOp_fild_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fild m64i");
    /* Load a 64-bit memory integer, convert to 80-bit real, push onto the stack. */

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(int64_t,               i64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int64_t const *, pi64Val,  i64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I64(i64Val, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Register 7 (TOP-1 after push) must be free, otherwise it's a push overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i64_to_r80, pFpuRes, pi64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
16286
16287
/** Opcode 0xdf !11/6.
 * FBSTP m80bcd - store packed BCD and pop; not implemented yet (stub). */
FNIEMOP_STUB_1(iemOp_fbstp_m80d, uint8_t, bRm);
16290
16291
/** Opcode 0xdf !11/7. */
FNIEMOP_DEF_1(iemOp_fistp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fistp m64i");
    /* Store ST(0) to memory as int64 (rounded per FCW), then pop. */
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int64_t *,               pi64Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Underflow: with #IE masked, write the integer-indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
16326
16327
16328/** Opcode 0xdf. */
16329FNIEMOP_DEF(iemOp_EscF7)
16330{
16331 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
16332 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
16333 {
16334 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
16335 {
16336 case 0: return FNIEMOP_CALL_1(iemOp_ffreep_stN, bRm); /* ffree + pop afterwards, since forever according to AMD. */
16337 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, behaves like FXCH ST(i) on intel. */
16338 case 2: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
16339 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
16340 case 4: if (bRm == 0xe0)
16341 return FNIEMOP_CALL(iemOp_fnstsw_ax);
16342 return IEMOP_RAISE_INVALID_OPCODE();
16343 case 5: return FNIEMOP_CALL_1(iemOp_fucomip_st0_stN, bRm);
16344 case 6: return FNIEMOP_CALL_1(iemOp_fcomip_st0_stN, bRm);
16345 case 7: return IEMOP_RAISE_INVALID_OPCODE();
16346 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16347 }
16348 }
16349 else
16350 {
16351 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
16352 {
16353 case 0: return FNIEMOP_CALL_1(iemOp_fild_m16i, bRm);
16354 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m16i, bRm);
16355 case 2: return FNIEMOP_CALL_1(iemOp_fist_m16i, bRm);
16356 case 3: return FNIEMOP_CALL_1(iemOp_fistp_m16i, bRm);
16357 case 4: return FNIEMOP_CALL_1(iemOp_fbld_m80d, bRm);
16358 case 5: return FNIEMOP_CALL_1(iemOp_fild_m64i, bRm);
16359 case 6: return FNIEMOP_CALL_1(iemOp_fbstp_m80d, bRm);
16360 case 7: return FNIEMOP_CALL_1(iemOp_fistp_m64i, bRm);
16361 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16362 }
16363 }
16364}
16365
16366
/** Opcode 0xe0. */
FNIEMOP_DEF(iemOp_loopne_Jb)
{
    IEMOP_MNEMONIC("loopne Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* Decrement CX/ECX/RCX (per effective address size) and jump if the
       counter is non-zero AND ZF is clear. */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16413
16414
/** Opcode 0xe1. */
FNIEMOP_DEF(iemOp_loope_Jb)
{
    IEMOP_MNEMONIC("loope Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* Decrement CX/ECX/RCX (per effective address size) and jump if the
       counter is non-zero AND ZF is set. */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16461
16462
/** Opcode 0xe2. */
FNIEMOP_DEF(iemOp_loop_Jb)
{
    IEMOP_MNEMONIC("loop Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /** @todo Check out the #GP case if EIP < CS.Base or EIP > CS.Limit when
     * using the 32-bit operand size override.  How can that be restarted?  See
     * weird pseudo code in intel manual. */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            /* i8Imm == -(instruction length) means the loop jumps to itself;
               short-circuit by clearing the counter and advancing instead of
               emulating every iteration. */
            if (-(int8_t)pIemCpu->offOpcode != i8Imm)
            {
                IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
                IEM_MC_IF_CX_IS_NZ() {
                    IEM_MC_REL_JMP_S8(i8Imm);
                } IEM_MC_ELSE() {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ENDIF();
            }
            else
            {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP();
            }
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            /* Same self-loop short-circuit as above. */
            if (-(int8_t)pIemCpu->offOpcode != i8Imm)
            {
                IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
                IEM_MC_IF_ECX_IS_NZ() {
                    IEM_MC_REL_JMP_S8(i8Imm);
                } IEM_MC_ELSE() {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ENDIF();
            }
            else
            {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP();
            }
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            /* Same self-loop short-circuit as above. */
            if (-(int8_t)pIemCpu->offOpcode != i8Imm)
            {
                IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
                IEM_MC_IF_RCX_IS_NZ() {
                    IEM_MC_REL_JMP_S8(i8Imm);
                } IEM_MC_ELSE() {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ENDIF();
            }
            else
            {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP();
            }
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16536
16537
/** Opcode 0xe3. */
FNIEMOP_DEF(iemOp_jecxz_Jb)
{
    IEMOP_MNEMONIC("jecxz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* Jump if CX/ECX/RCX (per effective address size) is zero; note the
       inverted branch structure: the non-zero case falls through. */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_CX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_ECX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_RCX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16581
16582
16583/** Opcode 0xe4 */
16584FNIEMOP_DEF(iemOp_in_AL_Ib)
16585{
16586 IEMOP_MNEMONIC("in eAX,Ib");
16587 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
16588 IEMOP_HLP_NO_LOCK_PREFIX();
16589 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, 1);
16590}
16591
16592
/** Opcode 0xe5 */
FNIEMOP_DEF(iemOp_in_eAX_Ib)
{
    IEMOP_MNEMONIC("in eAX,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* Access width: 2 bytes for 16-bit operand size, otherwise 4. */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, pIemCpu->enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
16601
16602
/** Opcode 0xe6 */
FNIEMOP_DEF(iemOp_out_Ib_AL)
{
    IEMOP_MNEMONIC("out Ib,AL");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* Write AL (width 1) to the immediate port number. */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, 1);
}
16611
16612
/** Opcode 0xe7 */
FNIEMOP_DEF(iemOp_out_Ib_eAX)
{
    IEMOP_MNEMONIC("out Ib,eAX");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* Access width: 2 bytes for 16-bit operand size, otherwise 4. */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, pIemCpu->enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
16621
16622
/** Opcode 0xe8. */
FNIEMOP_DEF(iemOp_call_Jv)
{
    IEMOP_MNEMONIC("call Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    /* Near relative call; immediate size follows the operand size
       (sign-extended to 64-bit in long mode). */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_16, (int16_t)u16Imm);
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_32, (int32_t)u32Imm);
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_64, u64Imm);
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16651
16652
/** Opcode 0xe9. */
FNIEMOP_DEF(iemOp_jmp_Jv)
{
    IEMOP_MNEMONIC("jmp Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    /* Near relative jump; in 64-bit mode the displacement is still 32-bit
       (shared case label below). */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
            IEM_MC_BEGIN(0, 0);
            IEM_MC_REL_JMP_S16(i16Imm);
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        case IEMMODE_32BIT:
        {
            int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
            IEM_MC_BEGIN(0, 0);
            IEM_MC_REL_JMP_S32(i32Imm);
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16682
16683
/** Opcode 0xea. */
FNIEMOP_DEF(iemOp_jmp_Ap)
{
    IEMOP_MNEMONIC("jmp Ap");
    IEMOP_HLP_NO_64BIT(); /* Direct far JMP is invalid in 64-bit mode. */

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    uint32_t offSeg;
    if (pIemCpu->enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&offSeg);
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
    uint16_t uSel;  IEM_OPCODE_GET_NEXT_U16(&uSel);
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_FarJmp, uSel, offSeg, pIemCpu->enmEffOpSize);
}
16700
16701
/** Opcode 0xeb. */
FNIEMOP_DEF(iemOp_jmp_Jb)
{
    IEMOP_MNEMONIC("jmp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* Unconditional short relative jump. */
    IEM_MC_BEGIN(0, 0);
    IEM_MC_REL_JMP_S8(i8Imm);
    IEM_MC_END();
    return VINF_SUCCESS;
}
16715
16716
/** Opcode 0xec */
FNIEMOP_DEF(iemOp_in_AL_DX)
{
    IEMOP_MNEMONIC("in AL,DX");
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* Read one byte from the port in DX into AL. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, 1);
}
16724
16725
/** Opcode 0xed
 * @note Function name is missing the "in_" part (should be iemOp_in_eAX_DX);
 *       left as-is since the one-byte opcode table references it. */
FNIEMOP_DEF(iemOp_eAX_DX)
{
    IEMOP_MNEMONIC("in eAX,DX");
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* Access width: 2 bytes for 16-bit operand size, otherwise 4. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, pIemCpu->enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
16733
16734
/** Opcode 0xee */
FNIEMOP_DEF(iemOp_out_DX_AL)
{
    IEMOP_MNEMONIC("out DX,AL");
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* Write AL (width 1) to the port in DX. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, 1);
}
16742
16743
/** Opcode 0xef */
FNIEMOP_DEF(iemOp_out_DX_eAX)
{
    IEMOP_MNEMONIC("out DX,eAX");
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* Access width: 2 bytes for 16-bit operand size, otherwise 4. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, pIemCpu->enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
16751
16752
/** Opcode 0xf0. */
FNIEMOP_DEF(iemOp_lock)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("lock");
    /* Record the LOCK prefix and continue decoding the following opcode. */
    pIemCpu->fPrefixes |= IEM_OP_PRF_LOCK;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
16762
16763
/** Opcode 0xf1. */
FNIEMOP_DEF(iemOp_int_1)
{
    IEMOP_MNEMONIC("int1"); /* icebp */
    IEMOP_HLP_MIN_386(); /** @todo does not generate #UD on 286, or so they say... */
    /** @todo testcase! */
    /* Raises #DB; fIsBpInstr=false so it is not treated as an INT3 breakpoint. */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_DB, false /*fIsBpInstr*/);
}
16772
16773
/** Opcode 0xf2. */
FNIEMOP_DEF(iemOp_repne)
{
    /* This overrides any previous REPE prefix. */
    pIemCpu->fPrefixes &= ~IEM_OP_PRF_REPZ;
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repne");
    pIemCpu->fPrefixes |= IEM_OP_PRF_REPNZ;

    /* Continue decoding the following opcode with the prefix recorded. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
16785
16786
/** Opcode 0xf3. */
FNIEMOP_DEF(iemOp_repe)
{
    /* This overrides any previous REPNE prefix. */
    pIemCpu->fPrefixes &= ~IEM_OP_PRF_REPNZ;
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repe");
    pIemCpu->fPrefixes |= IEM_OP_PRF_REPZ;

    /* Continue decoding the following opcode with the prefix recorded. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
16798
16799
16800/** Opcode 0xf4. */
16801FNIEMOP_DEF(iemOp_hlt)
16802{
16803 IEMOP_HLP_NO_LOCK_PREFIX();
16804 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_hlt);
16805}
16806
16807
/** Opcode 0xf5. */
FNIEMOP_DEF(iemOp_cmc)
{
    IEMOP_MNEMONIC("cmc");
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* Complement (toggle) the carry flag; no other flags affected. */
    IEM_MC_BEGIN(0, 0);
    IEM_MC_FLIP_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
16819
16820
16821/**
16822 * Common implementation of 'inc/dec/not/neg Eb'.
16823 *
16824 * @param bRm The RM byte.
16825 * @param pImpl The instruction implementation.
16826 */
16827FNIEMOP_DEF_2(iemOpCommonUnaryEb, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
16828{
16829 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
16830 {
16831 /* register access */
16832 IEM_MC_BEGIN(2, 0);
16833 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
16834 IEM_MC_ARG(uint32_t *, pEFlags, 1);
16835 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
16836 IEM_MC_REF_EFLAGS(pEFlags);
16837 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
16838 IEM_MC_ADVANCE_RIP();
16839 IEM_MC_END();
16840 }
16841 else
16842 {
16843 /* memory access. */
16844 IEM_MC_BEGIN(2, 2);
16845 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
16846 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
16847 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
16848
16849 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
16850 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
16851 IEM_MC_FETCH_EFLAGS(EFlags);
16852 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
16853 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
16854 else
16855 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU8, pu8Dst, pEFlags);
16856
16857 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
16858 IEM_MC_COMMIT_EFLAGS(EFlags);
16859 IEM_MC_ADVANCE_RIP();
16860 IEM_MC_END();
16861 }
16862 return VINF_SUCCESS;
16863}
16864
16865
/**
 * Common implementation of 'inc/dec/not/neg Ev'.
 *
 * Register operands are forwarded to the shared general-register worker;
 * memory operands are handled here per effective operand size, mapped
 * read/write and committed, with a locked worker when LOCK is present.
 *
 * @param   bRm     The RM byte.
 * @param   pImpl   The instruction implementation.
 */
FNIEMOP_DEF_2(iemOpCommonUnaryEv, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
{
    /* Registers are handled by a common worker. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, pImpl, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);

    /* Memory we do here. */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 2);
            IEM_MC_ARG(uint16_t *, pu16Dst, 0);
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
            IEM_MC_FETCH_EFLAGS(EFlags);
            if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
            else
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU16, pu16Dst, pEFlags);

            IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
            IEM_MC_COMMIT_EFLAGS(EFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 2);
            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
            IEM_MC_FETCH_EFLAGS(EFlags);
            if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
            else
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU32, pu32Dst, pEFlags);

            IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
            IEM_MC_COMMIT_EFLAGS(EFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 2);
            IEM_MC_ARG(uint64_t *, pu64Dst, 0);
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
            IEM_MC_FETCH_EFLAGS(EFlags);
            if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
            else
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU64, pu64Dst, pEFlags);

            IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
            IEM_MC_COMMIT_EFLAGS(EFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16944
16945
/** Opcode 0xf6 /0. */
FNIEMOP_DEF_1(iemOp_grp3_test_Eb, uint8_t, bRm)
{
    /* TEST Eb,Ib: AND operand with immediate, update flags, discard result. */
    IEMOP_MNEMONIC("test Eb,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/u8Imm, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, u8Src, 1);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        /* 1 = one immediate byte still to be fetched after the ModR/M bytes. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_ASSIGN(u8Src, u8Imm);
        /* TEST never writes its destination, so a read-only mapping suffices. */
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_R, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_R);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
16993
16994
/** Opcode 0xf7 /0. */
FNIEMOP_DEF_1(iemOp_grp3_test_Ev, uint8_t, bRm)
{
    /* TEST Ev,Iv: flags-only AND with an immediate, per effective operand size.
       In 64-bit mode the immediate is 32 bits, sign-extended to 64. */
    IEMOP_MNEMONIC("test Ev,Iv");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/u16Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/u32Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
                /* No clearing the high dword here - test doesn't write back the result. */
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/u64Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* 2 = immediate word still to be fetched after ModR/M. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                /* Read-only mapping: TEST does not write the destination. */
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_R, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* 4 = immediate dword still to be fetched after ModR/M. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_R, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* 4 = sign-extended imm32 still to be fetched after ModR/M. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_R, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
17131
17132
/** Opcode 0xf6 /4, /5, /6 and /7. */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEb, uint8_t, bRm, PFNIEMAIMPLMULDIVU8, pfnU8)
{
    /* Byte-sized MUL/IMUL/DIV/IDIV: AX is implicit destination/source.
       The worker returns non-zero on divide error, which raises #DE. */
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        /* NOTE(review): IEMOP_HLP_NO_LOCK_PREFIX was already invoked above;
           this second call looks redundant - confirm before removing. */
        IEMOP_HLP_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_ARG(uint16_t *, pu16AX, 0);
        IEM_MC_ARG(uint8_t, u8Value, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(int32_t, rc);

        IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *, pu16AX, 0);
        IEM_MC_ARG(uint8_t, u8Value, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(int32_t, rc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_MEM_U8(u8Value, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
17187
17188
/** Opcode 0xf7 /4, /5, /6 and /7. */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEv, uint8_t, bRm, PCIEMOPMULDIVSIZES, pImpl)
{
    /* Word/dword/qword MUL/IMUL/DIV/IDIV: xDX:xAX is the implicit
       destination/source pair.  A non-zero worker return raises #DE. */
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint16_t *, pu16AX, 0);
                IEM_MC_ARG(uint16_t *, pu16DX, 1);
                IEM_MC_ARG(uint16_t, u16Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint32_t *, pu32AX, 0);
                IEM_MC_ARG(uint32_t *, pu32DX, 1);
                IEM_MC_ARG(uint32_t, u32Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    /* 32-bit register writes zero bits 63:32; done explicitly
                       here since the worker wrote through a 32-bit reference. */
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint64_t *, pu64AX, 0);
                IEM_MC_ARG(uint64_t *, pu64DX, 1);
                IEM_MC_ARG(uint64_t, u64Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint16_t *, pu16AX, 0);
                IEM_MC_ARG(uint16_t *, pu16DX, 1);
                IEM_MC_ARG(uint16_t, u16Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint32_t *, pu32AX, 0);
                IEM_MC_ARG(uint32_t *, pu32DX, 1);
                IEM_MC_ARG(uint32_t, u32Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    /* See register path: 32-bit writes clear bits 63:32. */
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint64_t *, pu64AX, 0);
                IEM_MC_ARG(uint64_t *, pu64DX, 1);
                IEM_MC_ARG(uint64_t, u64Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
17372
/** Opcode 0xf6. */
FNIEMOP_DEF(iemOp_Grp3_Eb)
{
    /* Group 3, byte operands: dispatch on the ModR/M reg field. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
        case 1:
/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2:
            IEMOP_MNEMONIC("not Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_not);
        case 3:
            IEMOP_MNEMONIC("neg Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_neg);
        case 4:
            IEMOP_MNEMONIC("mul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_mul_u8);
        case 5:
            IEMOP_MNEMONIC("imul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_imul_u8);
        case 6:
            IEMOP_MNEMONIC("div Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_div_u8);
        case 7:
            IEMOP_MNEMONIC("idiv Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_idiv_u8);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
17409
17410
/** Opcode 0xf7. */
FNIEMOP_DEF(iemOp_Grp3_Ev)
{
    /* Group 3, word/dword/qword operands: dispatch on the ModR/M reg field. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
        case 1:
/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2:
            IEMOP_MNEMONIC("not Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_not);
        case 3:
            IEMOP_MNEMONIC("neg Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_neg);
        case 4:
            IEMOP_MNEMONIC("mul Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_mul);
        case 5:
            IEMOP_MNEMONIC("imul Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_imul);
        case 6:
            IEMOP_MNEMONIC("div Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_div);
        case 7:
            IEMOP_MNEMONIC("idiv Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_idiv);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
17447
17448
/** Opcode 0xf8. */
FNIEMOP_DEF(iemOp_clc)
{
    /* Clear the carry flag; no other flags are touched. */
    IEMOP_MNEMONIC("clc");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
17460
17461
/** Opcode 0xf9. */
FNIEMOP_DEF(iemOp_stc)
{
    /* Set the carry flag; no other flags are touched. */
    IEMOP_MNEMONIC("stc");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_SET_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
17473
17474
/** Opcode 0xfa. */
FNIEMOP_DEF(iemOp_cli)
{
    IEMOP_MNEMONIC("cli");
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* IOPL/VME checks and the IF update are done by the C implementation. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cli);
}
17482
17483
/** Opcode 0xfb. */
FNIEMOP_DEF(iemOp_sti)
{
    IEMOP_MNEMONIC("sti");
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* IOPL/VME checks, the IF update and the interrupt shadow are handled
       by the C implementation. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sti);
}
17490
17491
/** Opcode 0xfc. */
FNIEMOP_DEF(iemOp_cld)
{
    /* Clear the direction flag (string ops move forward). */
    IEMOP_MNEMONIC("cld");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
17503
17504
/** Opcode 0xfd. */
FNIEMOP_DEF(iemOp_std)
{
    /* Set the direction flag (string ops move backward). */
    IEMOP_MNEMONIC("std");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_SET_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
17516
17517
17518/** Opcode 0xfe. */
17519FNIEMOP_DEF(iemOp_Grp4)
17520{
17521 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
17522 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
17523 {
17524 case 0:
17525 IEMOP_MNEMONIC("inc Ev");
17526 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_inc);
17527 case 1:
17528 IEMOP_MNEMONIC("dec Ev");
17529 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_dec);
17530 default:
17531 IEMOP_MNEMONIC("grp4-ud");
17532 return IEMOP_RAISE_INVALID_OPCODE();
17533 }
17534}
17535
17536
/**
 * Opcode 0xff /2.
 *
 * Near indirect call: the target comes from a register or memory operand.
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_calln_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC("calln Ev");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo Too early? */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* The new RIP is taken from a register. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from a memory location. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEM_MC_FETCH_MEM_U16(u16Target, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEM_MC_FETCH_MEM_U32(u32Target, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEM_MC_FETCH_MEM_U64(u64Target, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
17618
17619typedef IEM_CIMPL_DECL_TYPE_3(FNIEMCIMPLFARBRANCH, uint16_t, uSel, uint64_t, offSeg, IEMMODE, enmOpSize);
17620
17621FNIEMOP_DEF_2(iemOpHlp_Grp5_far_Ep, uint8_t, bRm, FNIEMCIMPLFARBRANCH *, pfnCImpl)
17622{
17623 /* Registers? How?? */
17624 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
17625 return IEMOP_RAISE_INVALID_OPCODE(); /* callf eax is not legal */
17626
17627 /* Far pointer loaded from memory. */
17628 switch (pIemCpu->enmEffOpSize)
17629 {
17630 case IEMMODE_16BIT:
17631 IEM_MC_BEGIN(3, 1);
17632 IEM_MC_ARG(uint16_t, u16Sel, 0);
17633 IEM_MC_ARG(uint16_t, offSeg, 1);
17634 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
17635 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17636 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17637 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17638 IEM_MC_FETCH_MEM_U16(offSeg, pIemCpu->iEffSeg, GCPtrEffSrc);
17639 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc, 2);
17640 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
17641 IEM_MC_END();
17642 return VINF_SUCCESS;
17643
17644 case IEMMODE_64BIT:
17645 /** @todo testcase: AMD does not seem to believe in the case (see bs-cpu-xcpt-1)
17646 * and will apparently ignore REX.W, at least for the jmp far qword [rsp]
17647 * and call far qword [rsp] encodings. */
17648 if (!IEM_IS_GUEST_CPU_AMD(pIemCpu))
17649 {
17650 IEM_MC_BEGIN(3, 1);
17651 IEM_MC_ARG(uint16_t, u16Sel, 0);
17652 IEM_MC_ARG(uint64_t, offSeg, 1);
17653 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
17654 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17655 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17656 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17657 IEM_MC_FETCH_MEM_U64(offSeg, pIemCpu->iEffSeg, GCPtrEffSrc);
17658 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc, 8);
17659 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
17660 IEM_MC_END();
17661 return VINF_SUCCESS;
17662 }
17663 /* AMD falls thru. */
17664
17665 case IEMMODE_32BIT:
17666 IEM_MC_BEGIN(3, 1);
17667 IEM_MC_ARG(uint16_t, u16Sel, 0);
17668 IEM_MC_ARG(uint32_t, offSeg, 1);
17669 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_32BIT, 2);
17670 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17671 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17672 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17673 IEM_MC_FETCH_MEM_U32(offSeg, pIemCpu->iEffSeg, GCPtrEffSrc);
17674 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc, 4);
17675 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
17676 IEM_MC_END();
17677 return VINF_SUCCESS;
17678
17679 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17680 }
17681}
17682
17683
/**
 * Opcode 0xff /3.
 *
 * Far indirect call through a memory far pointer.
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_callf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC("callf Ep");
    /* Shares the far-pointer loading with jmpf Ep; only the CIMPL differs. */
    return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_callf);
}
17693
17694
/**
 * Opcode 0xff /4.
 *
 * Near indirect jump: the target comes from a register or memory operand.
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpn_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC("jmpn Ev");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo Too early? */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* The new RIP is taken from a register. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_SET_RIP_U16(u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_SET_RIP_U32(u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_SET_RIP_U64(u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from a memory location. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEM_MC_FETCH_MEM_U16(u16Target, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U16(u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEM_MC_FETCH_MEM_U32(u32Target, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U32(u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEM_MC_FETCH_MEM_U64(u64Target, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U64(u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
17776
17777
/**
 * Opcode 0xff /5.
 *
 * Far indirect jump through a memory far pointer.
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC("jmpf Ep");
    /* Shares the far-pointer loading with callf Ep; only the CIMPL differs. */
    return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_FarJmp);
}
17787
17788
/**
 * Opcode 0xff /6.
 *
 * Push a register or memory operand onto the stack.
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_push_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC("push Ev");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo Too early? */

    /* Registers are handled by a common worker. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(iemOpCommonPushGReg, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);

    /* Memory we do here. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint16_t, u16Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEM_MC_FETCH_MEM_U16(u16Src, pIemCpu->iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U16(u16Src);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEM_MC_FETCH_MEM_U32(u32Src, pIemCpu->iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U32(u32Src);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEM_MC_FETCH_MEM_U64(u64Src, pIemCpu->iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U64(u64Src);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
17842
17843
17844/** Opcode 0xff. */
17845FNIEMOP_DEF(iemOp_Grp5)
17846{
17847 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
17848 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
17849 {
17850 case 0:
17851 IEMOP_MNEMONIC("inc Ev");
17852 return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_inc);
17853 case 1:
17854 IEMOP_MNEMONIC("dec Ev");
17855 return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_dec);
17856 case 2:
17857 return FNIEMOP_CALL_1(iemOp_Grp5_calln_Ev, bRm);
17858 case 3:
17859 return FNIEMOP_CALL_1(iemOp_Grp5_callf_Ep, bRm);
17860 case 4:
17861 return FNIEMOP_CALL_1(iemOp_Grp5_jmpn_Ev, bRm);
17862 case 5:
17863 return FNIEMOP_CALL_1(iemOp_Grp5_jmpf_Ep, bRm);
17864 case 6:
17865 return FNIEMOP_CALL_1(iemOp_Grp5_push_Ev, bRm);
17866 case 7:
17867 IEMOP_MNEMONIC("grp5-ud");
17868 return IEMOP_RAISE_INVALID_OPCODE();
17869 }
17870 AssertFailedReturn(VERR_IEM_IPE_3);
17871}
17872
17873
17874
/**
 * The one-byte opcode decoder map.
 *
 * Indexed by the first opcode byte; each entry is the decoder/emulation
 * worker for that opcode.  Prefix bytes, escape bytes and ModR/M opcode
 * groups point at their respective dispatcher workers (iemOp_2byteEscape,
 * iemOp_seg_*, iemOp_Grp*, iemOp_EscF*).  The entry order is fixed by the
 * x86 opcode encoding -- never reorder.
 */
const PFNIEMOP g_apfnOneByteMap[256] =
{
    /* 0x00 */ iemOp_add_Eb_Gb, iemOp_add_Ev_Gv, iemOp_add_Gb_Eb, iemOp_add_Gv_Ev,
    /* 0x04 */ iemOp_add_Al_Ib, iemOp_add_eAX_Iz, iemOp_push_ES, iemOp_pop_ES,
    /* 0x08 */ iemOp_or_Eb_Gb, iemOp_or_Ev_Gv, iemOp_or_Gb_Eb, iemOp_or_Gv_Ev,
    /* 0x0c */ iemOp_or_Al_Ib, iemOp_or_eAX_Iz, iemOp_push_CS, iemOp_2byteEscape,
    /* 0x10 */ iemOp_adc_Eb_Gb, iemOp_adc_Ev_Gv, iemOp_adc_Gb_Eb, iemOp_adc_Gv_Ev,
    /* 0x14 */ iemOp_adc_Al_Ib, iemOp_adc_eAX_Iz, iemOp_push_SS, iemOp_pop_SS,
    /* 0x18 */ iemOp_sbb_Eb_Gb, iemOp_sbb_Ev_Gv, iemOp_sbb_Gb_Eb, iemOp_sbb_Gv_Ev,
    /* 0x1c */ iemOp_sbb_Al_Ib, iemOp_sbb_eAX_Iz, iemOp_push_DS, iemOp_pop_DS,
    /* 0x20 */ iemOp_and_Eb_Gb, iemOp_and_Ev_Gv, iemOp_and_Gb_Eb, iemOp_and_Gv_Ev,
    /* 0x24 */ iemOp_and_Al_Ib, iemOp_and_eAX_Iz, iemOp_seg_ES, iemOp_daa,
    /* 0x28 */ iemOp_sub_Eb_Gb, iemOp_sub_Ev_Gv, iemOp_sub_Gb_Eb, iemOp_sub_Gv_Ev,
    /* 0x2c */ iemOp_sub_Al_Ib, iemOp_sub_eAX_Iz, iemOp_seg_CS, iemOp_das,
    /* 0x30 */ iemOp_xor_Eb_Gb, iemOp_xor_Ev_Gv, iemOp_xor_Gb_Eb, iemOp_xor_Gv_Ev,
    /* 0x34 */ iemOp_xor_Al_Ib, iemOp_xor_eAX_Iz, iemOp_seg_SS, iemOp_aaa,
    /* 0x38 */ iemOp_cmp_Eb_Gb, iemOp_cmp_Ev_Gv, iemOp_cmp_Gb_Eb, iemOp_cmp_Gv_Ev,
    /* 0x3c */ iemOp_cmp_Al_Ib, iemOp_cmp_eAX_Iz, iemOp_seg_DS, iemOp_aas,
    /* 0x40 */ iemOp_inc_eAX, iemOp_inc_eCX, iemOp_inc_eDX, iemOp_inc_eBX,
    /* 0x44 */ iemOp_inc_eSP, iemOp_inc_eBP, iemOp_inc_eSI, iemOp_inc_eDI,
    /* 0x48 */ iemOp_dec_eAX, iemOp_dec_eCX, iemOp_dec_eDX, iemOp_dec_eBX,
    /* 0x4c */ iemOp_dec_eSP, iemOp_dec_eBP, iemOp_dec_eSI, iemOp_dec_eDI,
    /* 0x50 */ iemOp_push_eAX, iemOp_push_eCX, iemOp_push_eDX, iemOp_push_eBX,
    /* 0x54 */ iemOp_push_eSP, iemOp_push_eBP, iemOp_push_eSI, iemOp_push_eDI,
    /* 0x58 */ iemOp_pop_eAX, iemOp_pop_eCX, iemOp_pop_eDX, iemOp_pop_eBX,
    /* 0x5c */ iemOp_pop_eSP, iemOp_pop_eBP, iemOp_pop_eSI, iemOp_pop_eDI,
    /* 0x60 */ iemOp_pusha, iemOp_popa, iemOp_bound_Gv_Ma_evex, iemOp_arpl_Ew_Gw_movsx_Gv_Ev,
    /* 0x64 */ iemOp_seg_FS, iemOp_seg_GS, iemOp_op_size, iemOp_addr_size,
    /* 0x68 */ iemOp_push_Iz, iemOp_imul_Gv_Ev_Iz, iemOp_push_Ib, iemOp_imul_Gv_Ev_Ib,
    /* 0x6c */ iemOp_insb_Yb_DX, iemOp_inswd_Yv_DX, iemOp_outsb_Yb_DX, iemOp_outswd_Yv_DX,
    /* 0x70 */ iemOp_jo_Jb, iemOp_jno_Jb, iemOp_jc_Jb, iemOp_jnc_Jb,
    /* 0x74 */ iemOp_je_Jb, iemOp_jne_Jb, iemOp_jbe_Jb, iemOp_jnbe_Jb,
    /* 0x78 */ iemOp_js_Jb, iemOp_jns_Jb, iemOp_jp_Jb, iemOp_jnp_Jb,
    /* 0x7c */ iemOp_jl_Jb, iemOp_jnl_Jb, iemOp_jle_Jb, iemOp_jnle_Jb,
    /* 0x80 */ iemOp_Grp1_Eb_Ib_80, iemOp_Grp1_Ev_Iz, iemOp_Grp1_Eb_Ib_82, iemOp_Grp1_Ev_Ib,
    /* 0x84 */ iemOp_test_Eb_Gb, iemOp_test_Ev_Gv, iemOp_xchg_Eb_Gb, iemOp_xchg_Ev_Gv,
    /* 0x88 */ iemOp_mov_Eb_Gb, iemOp_mov_Ev_Gv, iemOp_mov_Gb_Eb, iemOp_mov_Gv_Ev,
    /* 0x8c */ iemOp_mov_Ev_Sw, iemOp_lea_Gv_M, iemOp_mov_Sw_Ev, iemOp_Grp1A,
    /* 0x90 */ iemOp_nop, iemOp_xchg_eCX_eAX, iemOp_xchg_eDX_eAX, iemOp_xchg_eBX_eAX,
    /* 0x94 */ iemOp_xchg_eSP_eAX, iemOp_xchg_eBP_eAX, iemOp_xchg_eSI_eAX, iemOp_xchg_eDI_eAX,
    /* 0x98 */ iemOp_cbw, iemOp_cwd, iemOp_call_Ap, iemOp_wait,
    /* 0x9c */ iemOp_pushf_Fv, iemOp_popf_Fv, iemOp_sahf, iemOp_lahf,
    /* 0xa0 */ iemOp_mov_Al_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL, iemOp_mov_Ov_rAX,
    /* 0xa4 */ iemOp_movsb_Xb_Yb, iemOp_movswd_Xv_Yv, iemOp_cmpsb_Xb_Yb, iemOp_cmpswd_Xv_Yv,
    /* 0xa8 */ iemOp_test_AL_Ib, iemOp_test_eAX_Iz, iemOp_stosb_Yb_AL, iemOp_stoswd_Yv_eAX,
    /* 0xac */ iemOp_lodsb_AL_Xb, iemOp_lodswd_eAX_Xv, iemOp_scasb_AL_Xb, iemOp_scaswd_eAX_Xv,
    /* 0xb0 */ iemOp_mov_AL_Ib, iemOp_CL_Ib, iemOp_DL_Ib, iemOp_BL_Ib,
    /* 0xb4 */ iemOp_mov_AH_Ib, iemOp_CH_Ib, iemOp_DH_Ib, iemOp_BH_Ib,
    /* 0xb8 */ iemOp_eAX_Iv, iemOp_eCX_Iv, iemOp_eDX_Iv, iemOp_eBX_Iv,
    /* 0xbc */ iemOp_eSP_Iv, iemOp_eBP_Iv, iemOp_eSI_Iv, iemOp_eDI_Iv,
    /* 0xc0 */ iemOp_Grp2_Eb_Ib, iemOp_Grp2_Ev_Ib, iemOp_retn_Iw, iemOp_retn,
    /* 0xc4 */ iemOp_les_Gv_Mp_vex2, iemOp_lds_Gv_Mp_vex3, iemOp_Grp11_Eb_Ib, iemOp_Grp11_Ev_Iz,
    /* 0xc8 */ iemOp_enter_Iw_Ib, iemOp_leave, iemOp_retf_Iw, iemOp_retf,
    /* 0xcc */ iemOp_int_3, iemOp_int_Ib, iemOp_into, iemOp_iret,
    /* 0xd0 */ iemOp_Grp2_Eb_1, iemOp_Grp2_Ev_1, iemOp_Grp2_Eb_CL, iemOp_Grp2_Ev_CL,
    /* 0xd4 */ iemOp_aam_Ib, iemOp_aad_Ib, iemOp_salc, iemOp_xlat,
    /* 0xd8 */ iemOp_EscF0, iemOp_EscF1, iemOp_EscF2, iemOp_EscF3,
    /* 0xdc */ iemOp_EscF4, iemOp_EscF5, iemOp_EscF6, iemOp_EscF7,
    /* 0xe0 */ iemOp_loopne_Jb, iemOp_loope_Jb, iemOp_loop_Jb, iemOp_jecxz_Jb,
    /* 0xe4 */ iemOp_in_AL_Ib, iemOp_in_eAX_Ib, iemOp_out_Ib_AL, iemOp_out_Ib_eAX,
    /* 0xe8 */ iemOp_call_Jv, iemOp_jmp_Jv, iemOp_jmp_Ap, iemOp_jmp_Jb,
    /* 0xec */ iemOp_in_AL_DX, iemOp_eAX_DX, iemOp_out_DX_AL, iemOp_out_DX_eAX,
    /* 0xf0 */ iemOp_lock, iemOp_int_1, iemOp_repne, iemOp_repe,
    /* 0xf4 */ iemOp_hlt, iemOp_cmc, iemOp_Grp3_Eb, iemOp_Grp3_Ev,
    /* 0xf8 */ iemOp_clc, iemOp_stc, iemOp_cli, iemOp_sti,
    /* 0xfc */ iemOp_cld, iemOp_std, iemOp_Grp4, iemOp_Grp5,
};
17942
17943
17944/** @} */
17945
Note: See TracBrowser for help on using the repository browser.

© 2025 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette