VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructions.cpp.h@ 60659

Last change on this file since 60659 was 60659, checked in by vboxsync, 9 years ago

iem/smsw: Missed a 286 tweak.

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 594.9 KB
Line 
1/* $Id: IEMAllInstructions.cpp.h 60659 2016-04-22 16:12:33Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 */
5
6/*
7 * Copyright (C) 2011-2015 Oracle Corporation
8 *
9 * This file is part of VirtualBox Open Source Edition (OSE), as
10 * available from http://www.virtualbox.org. This file is free software;
11 * you can redistribute it and/or modify it under the terms of the GNU
12 * General Public License (GPL) as published by the Free Software
13 * Foundation, in version 2 as it comes in the "COPYING" file of the
14 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
15 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
16 */
17
18
19/*******************************************************************************
20* Global Variables *
21*******************************************************************************/
22extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */
23
24
25/**
26 * Common worker for instructions like ADD, AND, OR, ++ with a byte
27 * memory/register as the destination.
28 *
29 * @param pImpl Pointer to the instruction implementation (assembly).
30 */
31FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rm_r8, PCIEMOPBINSIZES, pImpl)
32{
33 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
34
35 /*
36 * If rm is denoting a register, no more instruction bytes.
37 */
38 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
39 {
40 IEMOP_HLP_NO_LOCK_PREFIX();
41
42 IEM_MC_BEGIN(3, 0);
43 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
44 IEM_MC_ARG(uint8_t, u8Src, 1);
45 IEM_MC_ARG(uint32_t *, pEFlags, 2);
46
47 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
48 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
49 IEM_MC_REF_EFLAGS(pEFlags);
50 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
51
52 IEM_MC_ADVANCE_RIP();
53 IEM_MC_END();
54 }
55 else
56 {
57 /*
58 * We're accessing memory.
59 * Note! We're putting the eflags on the stack here so we can commit them
60 * after the memory.
61 */
62 uint32_t const fAccess = pImpl->pfnLockedU8 ? IEM_ACCESS_DATA_RW : IEM_ACCESS_DATA_R; /* CMP,TEST */
63 IEM_MC_BEGIN(3, 2);
64 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
65 IEM_MC_ARG(uint8_t, u8Src, 1);
66 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
67 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
68
69 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
70 IEM_MC_MEM_MAP(pu8Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
71 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
72 IEM_MC_FETCH_EFLAGS(EFlags);
73 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
74 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
75 else
76 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);
77
78 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
79 IEM_MC_COMMIT_EFLAGS(EFlags);
80 IEM_MC_ADVANCE_RIP();
81 IEM_MC_END();
82 }
83 return VINF_SUCCESS;
84}
85
86
/**
 * Common worker for word/dword/qword instructions like ADD, AND, OR, ++ with
 * memory/register as the destination (Ev,Gv operand form).
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rm_rv, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* LOCK with a register destination is invalid. */
        IEMOP_HLP_NO_LOCK_PREFIX();

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst,  0);
                IEM_MC_ARG(uint16_t,   u16Src,   1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst,  0);
                IEM_MC_ARG(uint32_t,   u32Src,   1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                /* TEST doesn't write the destination, so skip the 64-bit
                   high-dword clearing for it. */
                if (pImpl != &g_iemAImpl_test)
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst,  0);
                IEM_MC_ARG(uint64_t,   u64Src,   1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);

                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're accessing memory.
         * Note! We're putting the eflags on the stack here so we can commit them
         *       after the memory.
         */
        /* NOTE(review): pfnLockedU8 is used as the "has a locked variant" probe
           for all operand sizes; assumes the U8..U64 locked workers are either
           all set or all NULL in IEMOPBINSIZES -- confirm against the tables. */
        uint32_t const fAccess = pImpl->pfnLockedU8 ? IEM_ACCESS_DATA_RW : IEM_ACCESS_DATA_R /* CMP,TEST */;
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst,          0);
                IEM_MC_ARG(uint16_t,   u16Src,           1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst,          0);
                IEM_MC_ARG(uint32_t,   u32Src,           1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst,          0);
                IEM_MC_ARG(uint64_t,   u64Src,           1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
233
234
/**
 * Common worker for byte instructions like ADD, AND, OR, ++ with a register as
 * the destination (Gb,Eb operand form).
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_r8_rm, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Register destination, so LOCK is always invalid here. */
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0);
        IEM_MC_ARG(uint8_t,    u8Src,   1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_FETCH_GREG_U8(u8Src, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_GREG_U8(pu8Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
        /* Memory is only read here, no mapping/commit dance required. */
        IEM_MC_BEGIN(3, 1);
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0);
        IEM_MC_ARG(uint8_t,    u8Src,   1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_MEM_U8(u8Src, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U8(pu8Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
286
287
/**
 * Common worker for word/dword/qword instructions like ADD, AND, OR, ++ with a
 * register as the destination (Gv,Ev operand form).
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rv_rm, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Register destination, so LOCK is always invalid here. */
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst,  0);
                IEM_MC_ARG(uint16_t,   u16Src,   1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);

                IEM_MC_FETCH_GREG_U16(u16Src, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst,  0);
                IEM_MC_ARG(uint32_t,   u32Src,   1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);

                IEM_MC_FETCH_GREG_U32(u32Src, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U32(pu32Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                /* 32-bit writes zero the high dword of the 64-bit register.
                   NOTE(review): unlike the rm_rv worker there is no TEST
                   exception here -- confirm TEST never goes through this
                   worker with a register destination it shouldn't clear. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst,  0);
                IEM_MC_ARG(uint64_t,   u64Src,   1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);

                IEM_MC_FETCH_GREG_U64(u64Src, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
        /* Memory is only read here, no mapping/commit dance required. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst,  0);
                IEM_MC_ARG(uint16_t,   u16Src,   1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U16(u16Src, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst,  0);
                IEM_MC_ARG(uint32_t,   u32Src,   1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U32(u32Src, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U32(pu32Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                /* 32-bit writes zero the high dword of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst,  0);
                IEM_MC_ARG(uint64_t,   u64Src,   1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U64(u64Src, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
415
416
/**
 * Common worker for instructions like ADD, AND, OR, ++ with working on AL with
 * a byte immediate (AL,Ib operand form).
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_AL_Ib, PCIEMOPBINSIZES, pImpl)
{
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    /* Register destination, so LOCK is invalid. */
    IEMOP_HLP_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
    IEM_MC_ARG_CONST(uint8_t,   u8Src,/*=*/ u8Imm,  1);
    IEM_MC_ARG(uint32_t *,      pEFlags,            2);

    IEM_MC_REF_GREG_U8(pu8Dst, X86_GREG_xAX);
    IEM_MC_REF_EFLAGS(pEFlags);
    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
441
442
/**
 * Common worker for instructions like ADD, AND, OR, ++ with working on
 * AX/EAX/RAX with a word/dword immediate (rAX,Iz operand form).
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rAX_Iz, PCIEMOPBINSIZES, pImpl)
{
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint16_t *,      pu16Dst,                0);
            IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/ u16Imm,    1);
            IEM_MC_ARG(uint32_t *,      pEFlags,                2);

            IEM_MC_REF_GREG_U16(pu16Dst, X86_GREG_xAX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint32_t *,      pu32Dst,                0);
            IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/ u32Imm,    1);
            IEM_MC_ARG(uint32_t *,      pEFlags,                2);

            IEM_MC_REF_GREG_U32(pu32Dst, X86_GREG_xAX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

            /* TEST doesn't write the destination, so skip the 64-bit
               high-dword clearing for it. */
            if (pImpl != &g_iemAImpl_test)
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            /* 64-bit form takes a sign-extended 32-bit immediate. */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint64_t *,      pu64Dst,                0);
            IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/ u64Imm,    1);
            IEM_MC_ARG(uint32_t *,      pEFlags,                2);

            IEM_MC_REF_GREG_U64(pu64Dst, X86_GREG_xAX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
515
516
/** Opcodes 0xf1, 0xd6.
 * Shared handler for invalid one-byte opcodes: raises \#UD. */
FNIEMOP_DEF(iemOp_Invalid)
{
    IEMOP_MNEMONIC("Invalid");
    return IEMOP_RAISE_INVALID_OPCODE();
}
523
524
525
526/** @name ..... opcodes.
527 *
528 * @{
529 */
530
531/** @} */
532
533
534/** @name Two byte opcodes (first byte 0x0f).
535 *
536 * @{
537 */
538
/** Opcode 0x0f 0x00 /0.
 * SLDT - store the LDTR selector to a register (16/32/64-bit per operand
 * size) or to a 16-bit memory location. */
FNIEMOP_DEF_1(iemOp_Grp6_sldt, uint8_t, bRm)
{
    IEMOP_MNEMONIC("sldt Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Ldtr);
                IEM_MC_FETCH_LDTR_U16(u16Ldtr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Ldtr);
                IEM_MC_FETCH_LDTR_U32(u32Ldtr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Ldtr);
                IEM_MC_FETCH_LDTR_U64(u64Ldtr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory destination is always a 16-bit store, operand size ignored. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Ldtr);
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_LDTR_U16(u16Ldtr);
        IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Ldtr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
595
596
/** Opcode 0x0f 0x00 /1.
 * STR - store the task register selector to a register (16/32/64-bit per
 * operand size) or to a 16-bit memory location. */
FNIEMOP_DEF_1(iemOp_Grp6_str, uint8_t, bRm)
{
    IEMOP_MNEMONIC("str Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Tr);
                IEM_MC_FETCH_TR_U16(u16Tr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Tr);
                IEM_MC_FETCH_TR_U32(u32Tr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tr);
                IEM_MC_FETCH_TR_U64(u64Tr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory destination is always a 16-bit store, operand size ignored. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Tr);
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_TR_U16(u16Tr);
        IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Tr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
653
654
/** Opcode 0x0f 0x00 /2.
 * LLDT - load the LDTR from a 16-bit register or memory operand; the heavy
 * lifting (privilege/validity checks) is done by iemCImpl_lldt. */
FNIEMOP_DEF_1(iemOp_Grp6_lldt, uint8_t, bRm)
{
    IEMOP_MNEMONIC("lldt Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        /* CPL check done before reading memory. */
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
685
686
/** Opcode 0x0f 0x00 /3.
 * LTR - load the task register from a 16-bit register or memory operand; the
 * heavy lifting (privilege/validity checks) is done by iemCImpl_ltr. */
FNIEMOP_DEF_1(iemOp_Grp6_ltr, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ltr Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* CPL check done before reading memory. */
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
717
718
719/** Opcode 0x0f 0x00 /3. */
720FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
721{
722 IEMOP_HLP_MIN_286();
723 IEMOP_HLP_NO_REAL_OR_V86_MODE();
724
725 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
726 {
727 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
728 IEM_MC_BEGIN(2, 0);
729 IEM_MC_ARG(uint16_t, u16Sel, 0);
730 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
731 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
732 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
733 IEM_MC_END();
734 }
735 else
736 {
737 IEM_MC_BEGIN(2, 1);
738 IEM_MC_ARG(uint16_t, u16Sel, 0);
739 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
740 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
741 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
742 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
743 IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc);
744 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
745 IEM_MC_END();
746 }
747 return VINF_SUCCESS;
748}
749
750
/** Opcode 0x0f 0x00 /4.
 * VERR - verify a segment for reading; delegates to the shared verr/verw
 * worker with fWrite=false. */
FNIEMOP_DEF_1(iemOp_Grp6_verr, uint8_t, bRm)
{
    IEMOP_MNEMONIC("verr Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
}
758
759
760/** Opcode 0x0f 0x00 /5. */
761FNIEMOP_DEF_1(iemOp_Grp6_verw, uint8_t, bRm)
762{
763 IEMOP_MNEMONIC("verr Ew");
764 IEMOP_HLP_MIN_286();
765 return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
766}
767
768
769/** Opcode 0x0f 0x00. */
770FNIEMOP_DEF(iemOp_Grp6)
771{
772 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
773 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
774 {
775 case 0: return FNIEMOP_CALL_1(iemOp_Grp6_sldt, bRm);
776 case 1: return FNIEMOP_CALL_1(iemOp_Grp6_str, bRm);
777 case 2: return FNIEMOP_CALL_1(iemOp_Grp6_lldt, bRm);
778 case 3: return FNIEMOP_CALL_1(iemOp_Grp6_ltr, bRm);
779 case 4: return FNIEMOP_CALL_1(iemOp_Grp6_verr, bRm);
780 case 5: return FNIEMOP_CALL_1(iemOp_Grp6_verw, bRm);
781 case 6: return IEMOP_RAISE_INVALID_OPCODE();
782 case 7: return IEMOP_RAISE_INVALID_OPCODE();
783 IEM_NOT_REACHED_DEFAULT_CASE_RET();
784 }
785
786}
787
788
/** Opcode 0x0f 0x01 /0 (memory form).
 * SGDT - store the GDTR to memory; the actual store is done by
 * iemCImpl_sgdt. */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC("sgdt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t,         iEffSeg,                                    0);
    IEM_MC_ARG(RTGCPTR,         GCPtrEffSrc,                                1);
    IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSizeArg,/*=*/pIemCpu->enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}
806
807
/** Opcode 0x0f 0x01 0xc1 (vmcall).
 * Unimplemented stub: logs the miss and raises \#UD. */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
814
815
/** Opcode 0x0f 0x01 0xc2 (vmlaunch).
 * Unimplemented stub: logs the miss and raises \#UD. */
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
822
823
/** Opcode 0x0f 0x01 0xc3 (vmresume).
 * Unimplemented stub: logs the miss and raises \#UD. */
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
830
831
/** Opcode 0x0f 0x01 0xc4 (vmxoff).
 * Unimplemented stub: logs the miss and raises \#UD. */
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
838
839
/** Opcode 0x0f 0x01 /1 (memory form).
 * SIDT - store the IDTR to memory; the actual store is done by
 * iemCImpl_sidt. */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC("sidt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t,         iEffSeg,                                    0);
    IEM_MC_ARG(RTGCPTR,         GCPtrEffSrc,                                1);
    IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSizeArg,/*=*/pIemCpu->enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_sidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}
857
858
/** Opcode 0x0f 0x01 0xc8 (monitor).
 * Defers entirely to iemCImpl_monitor, passing the effective segment. */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    IEMOP_MNEMONIC("monitor");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pIemCpu->iEffSeg);
}
866
867
/** Opcode 0x0f 0x01 0xc9 (mwait).
 * Defers entirely to iemCImpl_mwait. */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    IEMOP_MNEMONIC("mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
}
875
876
/** Opcode 0x0f 0x01 /2 (memory form).
 * LGDT - load the GDTR from memory; the actual load is done by
 * iemCImpl_lgdt.
 * NOTE(review): unlike sgdt/sidt this lacks IEMOP_HLP_MIN_286() -- confirm
 * whether the omission is intentional. */
FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC("lgdt");
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t,         iEffSeg,                                    0);
    IEM_MC_ARG(RTGCPTR,         GCPtrEffSrc,                                1);
    IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSizeArg,/*=*/pIemCpu->enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}
893
894
/** Opcode 0x0f 0x01 0xd0.
 * XGETBV - raises \#UD unless the guest CPU profile advertises
 * XSAVE/XRSTOR; otherwise defers to iemCImpl_xgetbv. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    IEMOP_MNEMONIC("xgetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xgetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}
906
907
/** Opcode 0x0f 0x01 0xd1.
 * XSETBV - raises \#UD unless the guest CPU profile advertises
 * XSAVE/XRSTOR; otherwise defers to iemCImpl_xsetbv. */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    IEMOP_MNEMONIC("xsetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xsetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}
919
920
/** Opcode 0x0f 0x01 /3 (memory form).
 * LIDT - load the IDTR from memory; the actual load is done by
 * iemCImpl_lidt.  In 64-bit mode the operand size is forced to 64-bit. */
FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
{
    IEMMODE enmEffOpSize = pIemCpu->enmCpuMode == IEMMODE_64BIT
                         ? IEMMODE_64BIT
                         : pIemCpu->enmEffOpSize;
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t,         iEffSeg,                            0);
    IEM_MC_ARG(RTGCPTR,         GCPtrEffSrc,                        1);
    IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSizeArg,/*=*/enmEffOpSize,  2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}
938
939
/* AMD SVM instructions (0x0f 0x01 with mod=3, rm encodings 0xd8..0xdf).
   All unimplemented; the FNIEMOP_UD_STUB macro makes them raise \#UD. */

/** Opcode 0x0f 0x01 0xd8. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmmcall);

/** Opcode 0x0f 0x01 0xda. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);

/** Opcode 0x0f 0x01 0xdb. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);

/** Opcode 0x0f 0x01 0xdc. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);

/** Opcode 0x0f 0x01 0xdd. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);

/** Opcode 0x0f 0x01 0xde. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);

/** Opcode 0x0f 0x01 0xdf. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
963
/** Opcode 0x0f 0x01 /4.
 * SMSW - store the machine status word (low word of CR0) to a register
 * (16/32/64-bit per operand size) or to a 16-bit memory location. */
FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC("smsw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_LOCK_PREFIX();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_FETCH_CR0_U16(u16Tmp);
/* NOTE(review): when IEM_CFG_TARGET_CPU is statically IEMTARGETCPU_286 this
   286 tweak is compiled out -- confirm that is intended. */
#if IEM_CFG_TARGET_CPU == IEMTARGETCPU_DYNAMIC
                if (pIemCpu->uTargetCpu == IEMTARGETCPU_286)
                    IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0); /* Reserved bits observed all set on real hw. */
#endif
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_FETCH_CR0_U32(u32Tmp);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_FETCH_CR0_U64(u64Tmp);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Ignore operand size here, memory refs are always 16-bit. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Tmp);
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_CR0_U16(u16Tmp);
#if IEM_CFG_TARGET_CPU == IEMTARGETCPU_DYNAMIC
        if (pIemCpu->uTargetCpu == IEMTARGETCPU_286)
            IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0); /* Reserved bits observed all set on real hw. */
#endif
        IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Tmp);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }
}
1026
1027
/** Opcode 0x0f 0x01 /6 - LMSW, load machine status word; work done in iemCImpl_lmsw. */
FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
{
    /* The operand size is effectively ignored, all is 16-bit and only the
       lower 3-bits are used. */
    IEMOP_MNEMONIC("lmsw");
    IEMOP_HLP_MIN_286(); /* LMSW first appeared on the 286. */
    IEMOP_HLP_NO_LOCK_PREFIX();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register source. */
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    else
    {
        /* Memory source - always a 16-bit read regardless of operand size. */
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1056
1057
/** Opcode 0x0f 0x01 /7 (memory forms only) - INVLPG.
 * The register forms of /7 (swapgs, rdtscp) are routed elsewhere by
 * iemOp_Grp7, so this is only reached with a memory operand. */
FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
{
    IEMOP_MNEMONIC("invlpg");
    IEMOP_HLP_MIN_486(); /* INVLPG first appeared on the 486. */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(1, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}
1071
1072
/** Opcode 0x0f 0x01 0xf8 (/7, mod=3, r/m=0) - SWAPGS, 64-bit mode only. */
FNIEMOP_DEF(iemOp_Grp7_swapgs)
{
    IEMOP_MNEMONIC("swapgs");
    IEMOP_HLP_ONLY_64BIT();
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
}
1081
1082
/** Opcode 0x0f 0x01 0xf9 (/7, mod=3, r/m=1) - RDTSCP; not implemented yet. */
FNIEMOP_DEF(iemOp_Grp7_rdtscp)
{
    NOREF(pIemCpu);
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}
1090
1091
/** Opcode 0x0f 0x01 - group 7 dispatcher.
 * Memory forms dispatch on the /reg field; register forms (mod=3) further
 * dispatch on the r/m field for the VMX/SVM/monitor/xsave/swapgs encodings. */
FNIEMOP_DEF(iemOp_Grp7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: /* /0: SGDT (mem) or Intel VMX instructions (reg). */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_sgdt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 1: /* /1: SIDT (mem) or MONITOR/MWAIT (reg). */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_sidt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 2: /* /2: LGDT (mem) or XGETBV/XSETBV (reg). */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_lgdt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 3: /* /3: LIDT (mem) or AMD-V/SVM instructions (reg, all r/m values used). */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_lidt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
                case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
                case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
                case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }

        case 4: /* /4: SMSW, both register and memory forms. */
            return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);

        case 5: /* /5: unassigned. */
            return IEMOP_RAISE_INVALID_OPCODE();

        case 6: /* /6: LMSW, both register and memory forms. */
            return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);

        case 7: /* /7: INVLPG (mem) or SWAPGS/RDTSCP (reg). */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_invlpg, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
1168
/** Common worker for LAR (0x0f 0x02) and LSL (0x0f 0x03).
 * (The old "Opcode 0x0f 0x00 /3" comment here was a copy/paste leftover;
 * this worker is only called from iemOp_lar_Gv_Ew and iemOp_lsl_Gv_Ew.)
 *
 * @param fIsLar    true for LAR, false for LSL; passed on to the
 *                  iemCImpl_LarLsl_u16/u64 workers. */
FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
{
    IEMOP_HLP_NO_REAL_OR_V86_MODE(); /* Both instructions are protected-mode only. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register source selector. */
        IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 3);

                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_CIMPL_4(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, pEFlags, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT: /* 32-bit shares the 64-bit path/worker. */
            {
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 3);

                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_CIMPL_4(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, pEFlags, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory source selector. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);

                IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_CIMPL_4(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, pEFlags, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
/** @todo testcase: make sure it's a 16-bit read. */

                IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_CIMPL_4(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, pEFlags, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
1270
1271
1272
/** Opcode 0x0f 0x02 - LAR Gv,Ew; shares the common worker with LSL. */
FNIEMOP_DEF(iemOp_lar_Gv_Ew)
{
    IEMOP_MNEMONIC("lar Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true /*fIsLar*/);
}
1279
1280
/** Opcode 0x0f 0x03 - LSL Gv,Ew; shares the common worker with LAR. */
FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
{
    IEMOP_MNEMONIC("lsl Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false /*fIsLar*/);
}
1287
1288
/** Opcode 0x0f 0x05 - SYSCALL; deferred to iemCImpl_syscall. */
FNIEMOP_DEF(iemOp_syscall)
{
    IEMOP_MNEMONIC("syscall"); /** @todo 286 LOADALL */
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
}
1296
1297
/** Opcode 0x0f 0x06 - CLTS; deferred to iemCImpl_clts. */
FNIEMOP_DEF(iemOp_clts)
{
    IEMOP_MNEMONIC("clts");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
}
1305
1306
/** Opcode 0x0f 0x07 - SYSRET; deferred to iemCImpl_sysret. */
FNIEMOP_DEF(iemOp_sysret)
{
    IEMOP_MNEMONIC("sysret"); /** @todo 386 LOADALL */
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
}
1314
1315
/** Opcode 0x0f 0x08 - INVD, not implemented yet. */
FNIEMOP_STUB(iemOp_invd);
// IEMOP_HLP_MIN_486();
1319
1320
/** Opcode 0x0f 0x09 - WBINVD.
 * Emulated as a privileged no-op: only the CPL-0 check is performed. */
FNIEMOP_DEF(iemOp_wbinvd)
{
    IEMOP_MNEMONIC("wbinvd");
    IEMOP_HLP_MIN_486(); /* WBINVD first appeared on the 486. */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS; /* ignore for now */
}
1333
1334
/** Opcode 0x0f 0x0b - UD2; defined to raise an invalid-opcode exception. */
FNIEMOP_DEF(iemOp_ud2)
{
    IEMOP_MNEMONIC("ud2");
    return IEMOP_RAISE_INVALID_OPCODE();
}
1341
/** Opcode 0x0f 0x0d - AMD prefetch group P.
 * Requires the 3DNow!-prefetch CPU feature; register forms are invalid.
 * Currently emulated as a NOP (effective address calculated, nothing fetched). */
FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
{
    /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
    if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->f3DNowPrefetch)
    {
        IEMOP_MNEMONIC("GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC("GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    IEMOP_HLP_NO_LOCK_PREFIX();
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 2: /* Aliased to /0 for the time being. */
        case 4: /* Aliased to /0 for the time being. */
        case 5: /* Aliased to /0 for the time being. */
        case 6: /* Aliased to /0 for the time being. */
        case 7: /* Aliased to /0 for the time being. */
        case 0: IEMOP_MNEMONIC("prefetch"); break;
        case 1: IEMOP_MNEMONIC("prefetchw"); break;
        case 3: IEMOP_MNEMONIC("prefetchw"); break; /* NOTE(review): same mnemonic as /1 - confirm against AMD opcode map. */
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    /* Currently a NOP. */
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
1381
1382
/** Opcode 0x0f 0x0e - FEMMS, not implemented yet. */
FNIEMOP_STUB(iemOp_femms);


/*
 * AMD 3DNow! instructions, encoded as 0x0f 0x0f <modrm> <imm8 suffix>.
 * All stubs; dispatched on the suffix byte by iemOp_3Dnow below.
 */

/** Opcode 0x0f 0x0f 0x0c. */
FNIEMOP_STUB(iemOp_3Dnow_pi2fw_Pq_Qq);

/** Opcode 0x0f 0x0f 0x0d. */
FNIEMOP_STUB(iemOp_3Dnow_pi2fd_Pq_Qq);

/** Opcode 0x0f 0x0f 0x1c. */
FNIEMOP_STUB(iemOp_3Dnow_pf2fw_Pq_Qq);

/** Opcode 0x0f 0x0f 0x1d. */
FNIEMOP_STUB(iemOp_3Dnow_pf2fd_Pq_Qq);

/** Opcode 0x0f 0x0f 0x8a. */
FNIEMOP_STUB(iemOp_3Dnow_pfnacc_Pq_Qq);

/** Opcode 0x0f 0x0f 0x8e. */
FNIEMOP_STUB(iemOp_3Dnow_pfpnacc_Pq_Qq);

/** Opcode 0x0f 0x0f 0x90. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpge_Pq_Qq);

/** Opcode 0x0f 0x0f 0x94. */
FNIEMOP_STUB(iemOp_3Dnow_pfmin_Pq_Qq);

/** Opcode 0x0f 0x0f 0x96. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcp_Pq_Qq);

/** Opcode 0x0f 0x0f 0x97. */
FNIEMOP_STUB(iemOp_3Dnow_pfrsqrt_Pq_Qq);

/** Opcode 0x0f 0x0f 0x9a. */
FNIEMOP_STUB(iemOp_3Dnow_pfsub_Pq_Qq);

/** Opcode 0x0f 0x0f 0x9e. */
FNIEMOP_STUB(iemOp_3Dnow_pfadd_PQ_Qq);

/** Opcode 0x0f 0x0f 0xa0. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpgt_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa4. */
FNIEMOP_STUB(iemOp_3Dnow_pfmax_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa6. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcpit1_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa7. */
FNIEMOP_STUB(iemOp_3Dnow_pfrsqit1_Pq_Qq);

/** Opcode 0x0f 0x0f 0xaa. */
FNIEMOP_STUB(iemOp_3Dnow_pfsubr_Pq_Qq);

/** Opcode 0x0f 0x0f 0xae. */
FNIEMOP_STUB(iemOp_3Dnow_pfacc_PQ_Qq);

/** Opcode 0x0f 0x0f 0xb0. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpeq_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb4. */
FNIEMOP_STUB(iemOp_3Dnow_pfmul_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb6. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcpit2_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb7. */
FNIEMOP_STUB(iemOp_3Dnow_pmulhrw_Pq_Qq);

/** Opcode 0x0f 0x0f 0xbb. */
FNIEMOP_STUB(iemOp_3Dnow_pswapd_Pq_Qq);

/** Opcode 0x0f 0x0f 0xbf. */
FNIEMOP_STUB(iemOp_3Dnow_pavgusb_PQ_Qq);
1458
1459
/** Opcode 0x0f 0x0f - 3DNow! dispatcher.
 * Raises \#UD when the guest CPU lacks 3DNow!; otherwise dispatches on the
 * trailing suffix byte.  Note: the suffix is fetched before the ModR/M byte
 * is decoded by the per-instruction workers. */
FNIEMOP_DEF(iemOp_3Dnow)
{
    if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->f3DNow)
    {
        IEMOP_MNEMONIC("3Dnow");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    /* This is pretty sparse, use switch instead of table. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    switch (b)
    {
        case 0x0c: return FNIEMOP_CALL(iemOp_3Dnow_pi2fw_Pq_Qq);
        case 0x0d: return FNIEMOP_CALL(iemOp_3Dnow_pi2fd_Pq_Qq);
        case 0x1c: return FNIEMOP_CALL(iemOp_3Dnow_pf2fw_Pq_Qq);
        case 0x1d: return FNIEMOP_CALL(iemOp_3Dnow_pf2fd_Pq_Qq);
        case 0x8a: return FNIEMOP_CALL(iemOp_3Dnow_pfnacc_Pq_Qq);
        case 0x8e: return FNIEMOP_CALL(iemOp_3Dnow_pfpnacc_Pq_Qq);
        case 0x90: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpge_Pq_Qq);
        case 0x94: return FNIEMOP_CALL(iemOp_3Dnow_pfmin_Pq_Qq);
        case 0x96: return FNIEMOP_CALL(iemOp_3Dnow_pfrcp_Pq_Qq);
        case 0x97: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqrt_Pq_Qq);
        case 0x9a: return FNIEMOP_CALL(iemOp_3Dnow_pfsub_Pq_Qq);
        case 0x9e: return FNIEMOP_CALL(iemOp_3Dnow_pfadd_PQ_Qq);
        case 0xa0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpgt_Pq_Qq);
        case 0xa4: return FNIEMOP_CALL(iemOp_3Dnow_pfmax_Pq_Qq);
        case 0xa6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit1_Pq_Qq);
        case 0xa7: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqit1_Pq_Qq);
        case 0xaa: return FNIEMOP_CALL(iemOp_3Dnow_pfsubr_Pq_Qq);
        case 0xae: return FNIEMOP_CALL(iemOp_3Dnow_pfacc_PQ_Qq);
        case 0xb0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpeq_Pq_Qq);
        case 0xb4: return FNIEMOP_CALL(iemOp_3Dnow_pfmul_Pq_Qq);
        case 0xb6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit2_Pq_Qq);
        case 0xb7: return FNIEMOP_CALL(iemOp_3Dnow_pmulhrw_Pq_Qq);
        case 0xbb: return FNIEMOP_CALL(iemOp_3Dnow_pswapd_Pq_Qq);
        case 0xbf: return FNIEMOP_CALL(iemOp_3Dnow_pavgusb_PQ_Qq);
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
1501
1502
/* SSE/SSE2 move instructions 0x0f 0x10..0x17 - not implemented yet
   ("NEXT" marks candidates queued for implementation). */

/** Opcode 0x0f 0x10. */
FNIEMOP_STUB(iemOp_movups_Vps_Wps__movupd_Vpd_Wpd__movss_Vss_Wss__movsd_Vsd_Wsd);
/** Opcode 0x0f 0x11. */
FNIEMOP_STUB(iemOp_movups_Wps_Vps__movupd_Wpd_Vpd__movss_Wss_Vss__movsd_Vsd_Wsd);
/** Opcode 0x0f 0x12. */
FNIEMOP_STUB(iemOp_movlps_Vq_Mq__movhlps_Vq_Uq__movlpd_Vq_Mq__movsldup_Vq_Wq__movddup_Vq_Wq); //NEXT
/** Opcode 0x0f 0x13. */
FNIEMOP_STUB(iemOp_movlps_Mq_Vq__movlpd_Mq_Vq); //NEXT
/** Opcode 0x0f 0x14. */
FNIEMOP_STUB(iemOp_unpckhlps_Vps_Wq__unpcklpd_Vpd_Wq);
/** Opcode 0x0f 0x15. */
FNIEMOP_STUB(iemOp_unpckhps_Vps_Wq__unpckhpd_Vpd_Wq);
/** Opcode 0x0f 0x16. */
FNIEMOP_STUB(iemOp_movhps_Vq_Mq__movlhps_Vq_Uq__movhpd_Vq_Mq__movshdup_Vq_Wq); //NEXT
/** Opcode 0x0f 0x17. */
FNIEMOP_STUB(iemOp_movhps_Mq_Vq__movhpd_Mq_Vq); //NEXT
1519
1520
/** Opcode 0x0f 0x18 - group 16 (PREFETCHh).
 * Memory forms only; register forms raise \#UD.  All hints are currently
 * emulated as NOPs (effective address calculated, nothing fetched). */
FNIEMOP_DEF(iemOp_prefetch_Grp16)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX();
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 4: /* Aliased to /0 for the time being according to AMD. */
            case 5: /* Aliased to /0 for the time being according to AMD. */
            case 6: /* Aliased to /0 for the time being according to AMD. */
            case 7: /* Aliased to /0 for the time being according to AMD. */
            case 0: IEMOP_MNEMONIC("prefetchNTA m8"); break;
            case 1: IEMOP_MNEMONIC("prefetchT0  m8"); break;
            case 2: IEMOP_MNEMONIC("prefetchT1  m8"); break;
            case 3: IEMOP_MNEMONIC("prefetchT2  m8"); break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }

        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        /* Currently a NOP. */
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    return IEMOP_RAISE_INVALID_OPCODE();
}
1552
1553
/** Opcode 0x0f 0x19..0x1f - multi-byte NOP Ev.
 * The operand is decoded (memory forms calculate the effective address,
 * which may fault-check segment overrides) but nothing else happens. */
FNIEMOP_DEF(iemOp_nop_Ev)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEM_MC_BEGIN(0, 0);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        /* Currently a NOP. */
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1576
1577
/** Opcode 0x0f 0x20 - MOV Rd,Cd (read control register).
 * The mod field and operand-size overrides are ignored; operand size is
 * forced to 64-bit in long mode, 32-bit otherwise.  A LOCK prefix selects
 * CR8 on CPUs with the MOV-CR8-in-32-bit-mode feature, \#UD otherwise. */
FNIEMOP_DEF(iemOp_mov_Rd_Cd)
{
    /* mod is ignored, as is operand size overrides. */
    IEMOP_MNEMONIC("mov Rd,Cd");
    IEMOP_HLP_MIN_386();
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_64BIT;
    else
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg;
    if (pIemCpu->fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fMovCr8In32Bit)
            return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    /* Only CR0, CR2, CR3, CR4 and CR8 exist; anything else is #UD. */
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_HLP_DONE_DECODING();

    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, (X86_MODRM_RM_MASK & bRm) | pIemCpu->uRexB, iCrReg);
}
1609
1610
1611/** Opcode 0x0f 0x21. */
1612FNIEMOP_DEF(iemOp_mov_Rd_Dd)
1613{
1614 IEMOP_MNEMONIC("mov Rd,Dd");
1615 IEMOP_HLP_MIN_386();
1616 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1617 IEMOP_HLP_NO_LOCK_PREFIX();
1618 if (pIemCpu->fPrefixes & IEM_OP_PRF_REX_R)
1619 return IEMOP_RAISE_INVALID_OPCODE();
1620 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
1621 (X86_MODRM_RM_MASK & bRm) | pIemCpu->uRexB,
1622 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK));
1623}
1624
1625
/** Opcode 0x0f 0x22 - MOV Cd,Rd (write control register).
 * Mirrors iemOp_mov_Rd_Cd: mod and operand-size overrides ignored, forced
 * 64-bit operand size in long mode, LOCK prefix encodes CR8 where supported. */
FNIEMOP_DEF(iemOp_mov_Cd_Rd)
{
    /* mod is ignored, as is operand size overrides. */
    IEMOP_MNEMONIC("mov Cd,Rd");
    IEMOP_HLP_MIN_386();
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_64BIT;
    else
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg;
    if (pIemCpu->fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fMovCr8In32Bit)
            return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    /* Only CR0, CR2, CR3, CR4 and CR8 exist; anything else is #UD. */
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_HLP_DONE_DECODING();

    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, (X86_MODRM_RM_MASK & bRm) | pIemCpu->uRexB);
}
1657
1658
/** Opcode 0x0f 0x23 - MOV Dd,Rd (write debug register).
 * REX.R is invalid here (only 8 debug registers); raises \#UD.
 * Work is deferred to iemCImpl_mov_Dd_Rd. */
FNIEMOP_DEF(iemOp_mov_Dd_Rd)
{
    IEMOP_MNEMONIC("mov Dd,Rd");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REX_R)
        return IEMOP_RAISE_INVALID_OPCODE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
                                   ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK),
                                   (X86_MODRM_RM_MASK & bRm) | pIemCpu->uRexB);
}
1672
1673
/** Opcode 0x0f 0x24 - MOV Rd,Td (test registers); decoded as \#UD here. */
FNIEMOP_DEF(iemOp_mov_Rd_Td)
{
    IEMOP_MNEMONIC("mov Rd,Td");
    /** @todo works on 386 and 486. */
    /* The RM byte is not considered, see testcase. */
    return IEMOP_RAISE_INVALID_OPCODE();
}
1682
1683
/** Opcode 0x0f 0x26 - MOV Td,Rd (test registers); decoded as \#UD here. */
FNIEMOP_DEF(iemOp_mov_Td_Rd)
{
    IEMOP_MNEMONIC("mov Td,Rd");
    /** @todo works on 386 and 486. */
    /* The RM byte is not considered, see testcase. */
    return IEMOP_RAISE_INVALID_OPCODE();
}
1692
1693
/* SSE/SSE2 instructions 0x0f 0x28..0x2f - not implemented yet
   ("NEXT" marks candidates queued for implementation). */

/** Opcode 0x0f 0x28. */
FNIEMOP_STUB(iemOp_movaps_Vps_Wps__movapd_Vpd_Wpd);
/** Opcode 0x0f 0x29. */
FNIEMOP_STUB(iemOp_movaps_Wps_Vps__movapd_Wpd_Vpd);
/** Opcode 0x0f 0x2a. */
FNIEMOP_STUB(iemOp_cvtpi2ps_Vps_Qpi__cvtpi2pd_Vpd_Qpi__cvtsi2ss_Vss_Ey__cvtsi2sd_Vsd_Ey); //NEXT
/** Opcode 0x0f 0x2b. */
FNIEMOP_STUB(iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd); //NEXT:XP
/** Opcode 0x0f 0x2c. */
FNIEMOP_STUB(iemOp_cvttps2pi_Ppi_Wps__cvttpd2pi_Ppi_Wpd__cvttss2si_Gy_Wss__cvttsd2si_Yu_Wsd); //NEXT
/** Opcode 0x0f 0x2d. */
FNIEMOP_STUB(iemOp_cvtps2pi_Ppi_Wps__cvtpd2pi_QpiWpd__cvtss2si_Gy_Wss__cvtsd2si_Gy_Wsd);
/** Opcode 0x0f 0x2e. */
FNIEMOP_STUB(iemOp_ucomiss_Vss_Wss__ucomisd_Vsd_Wsd); //NEXT
/** Opcode 0x0f 0x2f. */
FNIEMOP_STUB(iemOp_comiss_Vss_Wss__comisd_Vsd_Wsd);
1710
1711
/** Opcode 0x0f 0x30 - WRMSR; deferred to iemCImpl_wrmsr. */
FNIEMOP_DEF(iemOp_wrmsr)
{
    IEMOP_MNEMONIC("wrmsr");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
}
1719
1720
/** Opcode 0x0f 0x31 - RDTSC; deferred to iemCImpl_rdtsc. */
FNIEMOP_DEF(iemOp_rdtsc)
{
    IEMOP_MNEMONIC("rdtsc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
}
1728
1729
/** Opcode 0x0f 0x32 - RDMSR; deferred to iemCImpl_rdmsr.
 * (Comment previously said 0x33, but RDMSR is 0f 32 in the opcode map.) */
FNIEMOP_DEF(iemOp_rdmsr)
{
    IEMOP_MNEMONIC("rdmsr");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
}
1737
1738
/** Opcode 0x0f 0x33 - RDPMC (was mislabelled 0x34). */
FNIEMOP_STUB(iemOp_rdpmc);
/** Opcode 0x0f 0x34. */
FNIEMOP_STUB(iemOp_sysenter);
/** Opcode 0x0f 0x35. */
FNIEMOP_STUB(iemOp_sysexit);
/** Opcode 0x0f 0x37. */
FNIEMOP_STUB(iemOp_getsec);
/** Opcode 0x0f 0x38. */
FNIEMOP_UD_STUB(iemOp_3byte_Esc_A4); /* Here there be dragons... */
/** Opcode 0x0f 0x3a. */
FNIEMOP_UD_STUB(iemOp_3byte_Esc_A5); /* Here there be dragons... */
/** Opcode 0x0f 0x3c (?). */
FNIEMOP_STUB(iemOp_movnti_Gv_Ev);
1753
/**
 * Implements a conditional move.
 *
 * Wish there was an obvious way to do this where we could share and reduce
 * code bloat.
 *
 * Register forms only read the source when the condition holds.  Memory
 * forms always fetch the source before evaluating the condition, the
 * conditional part being the register store.  The 32-bit variants clear the
 * high half of the destination in the ELSE branch as well (the 16/64-bit
 * variants have no ELSE branch).
 *
 * @param a_Cnd The conditional "microcode" operation.
 */
#define CMOV_X(a_Cnd) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) \
    { \
        switch (pIemCpu->enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(0, 1); \
                IEM_MC_LOCAL(uint16_t, u16Tmp); \
                a_Cnd { \
                    IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB); \
                    IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(0, 1); \
                IEM_MC_LOCAL(uint32_t, u32Tmp); \
                a_Cnd { \
                    IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB); \
                    IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp); \
                } IEM_MC_ELSE() { \
                    IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(0, 1); \
                IEM_MC_LOCAL(uint64_t, u64Tmp); \
                a_Cnd { \
                    IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB); \
                    IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        switch (pIemCpu->enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(0, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
                IEM_MC_LOCAL(uint16_t, u16Tmp); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
                IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrEffSrc); \
                a_Cnd { \
                    IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(0, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
                IEM_MC_LOCAL(uint32_t, u32Tmp); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
                IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffSrc); \
                a_Cnd { \
                    IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp); \
                } IEM_MC_ELSE() { \
                    IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(0, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
                IEM_MC_LOCAL(uint64_t, u64Tmp); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
                IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffSrc); \
                a_Cnd { \
                    IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } do {} while (0)
1854
1855
1856
/** Opcode 0x0f 0x40. */
FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovo Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF)); /* move if OF=1 */
}


/** Opcode 0x0f 0x41. */
FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovno Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF)); /* move if OF=0 */
}


/** Opcode 0x0f 0x42. */
FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovc Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF)); /* move if CF=1 */
}


/** Opcode 0x0f 0x43. */
FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovnc Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF)); /* move if CF=0 */
}


/** Opcode 0x0f 0x44. */
FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
{
    IEMOP_MNEMONIC("cmove Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF)); /* move if ZF=1 */
}


/** Opcode 0x0f 0x45. */
FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovne Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)); /* move if ZF=0 */
}


/** Opcode 0x0f 0x46. */
FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovbe Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF)); /* move if CF=1 or ZF=1 */
}


/** Opcode 0x0f 0x47. */
FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovnbe Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF)); /* move if CF=0 and ZF=0 */
}


/** Opcode 0x0f 0x48. */
FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovs Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF)); /* move if SF=1 */
}


/** Opcode 0x0f 0x49. */
FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovns Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF)); /* move if SF=0 */
}


/** Opcode 0x0f 0x4a. */
FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovp Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF)); /* move if PF=1 */
}


/** Opcode 0x0f 0x4b. */
FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovnp Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF)); /* move if PF=0 */
}


/** Opcode 0x0f 0x4c. */
FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovl Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF)); /* move if SF!=OF */
}


/** Opcode 0x0f 0x4d. */
FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovnl Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF)); /* move if SF==OF */
}


/** Opcode 0x0f 0x4e. */
FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovle Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF)); /* move if ZF=1 or SF!=OF */
}


/** Opcode 0x0f 0x4f. */
FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovnle Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF)); /* move if ZF=0 and SF==OF */
}

#undef CMOV_X
1985
/* SSE/SSE2 arithmetic and logical instructions 0x0f 0x50..0x5f - not
   implemented yet ("NEXT" marks candidates queued for implementation). */

/** Opcode 0x0f 0x50. */
FNIEMOP_STUB(iemOp_movmskps_Gy_Ups__movmskpd_Gy_Upd);
/** Opcode 0x0f 0x51. */
FNIEMOP_STUB(iemOp_sqrtps_Wps_Vps__sqrtpd_Wpd_Vpd__sqrtss_Vss_Wss__sqrtsd_Vsd_Wsd);
/** Opcode 0x0f 0x52. */
FNIEMOP_STUB(iemOp_rsqrtps_Wps_Vps__rsqrtss_Vss_Wss);
/** Opcode 0x0f 0x53. */
FNIEMOP_STUB(iemOp_rcpps_Wps_Vps__rcpss_Vs_Wss);
/** Opcode 0x0f 0x54. */
FNIEMOP_STUB(iemOp_andps_Vps_Wps__andpd_Wpd_Vpd);
/** Opcode 0x0f 0x55. */
FNIEMOP_STUB(iemOp_andnps_Vps_Wps__andnpd_Wpd_Vpd);
/** Opcode 0x0f 0x56. */
FNIEMOP_STUB(iemOp_orps_Wpd_Vpd__orpd_Wpd_Vpd);
/** Opcode 0x0f 0x57. */
FNIEMOP_STUB(iemOp_xorps_Vps_Wps__xorpd_Wpd_Vpd);
/** Opcode 0x0f 0x58. */
FNIEMOP_STUB(iemOp_addps_Vps_Wps__addpd_Vpd_Wpd__addss_Vss_Wss__addsd_Vsd_Wsd); //NEXT
/** Opcode 0x0f 0x59. */
FNIEMOP_STUB(iemOp_mulps_Vps_Wps__mulpd_Vpd_Wpd__mulss_Vss__Wss__mulsd_Vsd_Wsd);//NEXT
/** Opcode 0x0f 0x5a. */
FNIEMOP_STUB(iemOp_cvtps2pd_Vpd_Wps__cvtpd2ps_Vps_Wpd__cvtss2sd_Vsd_Wss__cvtsd2ss_Vss_Wsd);
/** Opcode 0x0f 0x5b. */
FNIEMOP_STUB(iemOp_cvtdq2ps_Vps_Wdq__cvtps2dq_Vdq_Wps__cvtps2dq_Vdq_Wps);
/** Opcode 0x0f 0x5c. */
FNIEMOP_STUB(iemOp_subps_Vps_Wps__subpd_Vps_Wdp__subss_Vss_Wss__subsd_Vsd_Wsd);
/** Opcode 0x0f 0x5d. */
FNIEMOP_STUB(iemOp_minps_Vps_Wps__minpd_Vpd_Wpd__minss_Vss_Wss__minsd_Vsd_Wsd);
/** Opcode 0x0f 0x5e. */
FNIEMOP_STUB(iemOp_divps_Vps_Wps__divpd_Vpd_Wpd__divss_Vss_Wss__divsd_Vsd_Wsd);
/** Opcode 0x0f 0x5f. */
FNIEMOP_STUB(iemOp_maxps_Vps_Wps__maxpd_Vpd_Wpd__maxss_Vss_Wss__maxsd_Vsd_Wsd);
2018
2019
/**
 * Common worker for SSE2 and MMX instructions on the forms:
 *      pxxxx xmm1, xmm2/mem128
 *      pxxxx mm1, mm2/mem32
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 32-bit memory access for MMX, and for SSE a 128-bit aligned 64-bit
 * memory access (only the low 64 bits are read).
 *
 * Exceptions type 4.
 *
 * @param   pImpl   Table with the U128 (SSE) and U64 (MMX) workers; a NULL
 *                  U64 worker marks the MMX form invalid.
 */
FNIEMOP_DEF_1(iemOpCommonMmxSse_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Operand-size prefix selects the SSE form, no prefix the MMX form;
       any repeat prefix makes the encoding invalid. */
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE */
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(2, 0);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_ARG(uint64_t const *, pSrc, 1); /* only the low qword of the source XMM reg */
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_XREG_U64_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_LOCAL(uint64_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                /* 64-bit read, but with the 128-bit alignment check of the SSE form. */
                IEM_MC_FETCH_MEM_U64_ALIGN_U128(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);

                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            if (!pImpl->pfnU64) /* instructions without an MMX form (e.g. punpcklqdq) */
                return IEMOP_RAISE_INVALID_OPCODE();
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
                /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(2, 0);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_ARG(uint32_t const *, pSrc, 1); /* only the low dword of the source MMX reg */
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_REF_MREG_U32_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
                IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_LOCAL(uint32_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint32_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_FETCH_MEM_U32(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);

                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
2126
2127
/** Opcode 0x0f 0x60 - punpcklbw (MMX and SSE2 forms). */
FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd__punpcklbw_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpcklbw");
    /* Prefix decoding and operand handling live in the shared low-half worker. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_LowLow_To_Full, &g_iemAImpl_punpcklbw);
}
2134
2135
/** Opcode 0x0f 0x61 - punpcklwd (MMX and SSE2 forms). */
FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd__punpcklwd_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpcklwd"); /** @todo AMD mark the MMX version as 3DNow!. Intel says MMX CPUID req. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_LowLow_To_Full, &g_iemAImpl_punpcklwd);
}
2142
2143
/** Opcode 0x0f 0x62 - punpckldq (MMX and SSE2 forms). */
FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd__punpckldq_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpckldq");
    /* Prefix decoding and operand handling live in the shared low-half worker. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_LowLow_To_Full, &g_iemAImpl_punpckldq);
}
2150
2151
/* Opcodes 0x0f 0x63 thru 0x67: pack/compare instructions - unimplemented stubs. */

/** Opcode 0x0f 0x63. */
FNIEMOP_STUB(iemOp_packsswb_Pq_Qq__packsswb_Vdq_Wdq);
/** Opcode 0x0f 0x64. */
FNIEMOP_STUB(iemOp_pcmpgtb_Pq_Qq__pcmpgtb_Vdq_Wdq);
/** Opcode 0x0f 0x65. */
FNIEMOP_STUB(iemOp_pcmpgtw_Pq_Qq__pcmpgtw_Vdq_Wdq);
/** Opcode 0x0f 0x66. */
FNIEMOP_STUB(iemOp_pcmpgtd_Pq_Qq__pcmpgtd_Vdq_Wdq);
/** Opcode 0x0f 0x67. */
FNIEMOP_STUB(iemOp_packuswb_Pq_Qq__packuswb_Vdq_Wdq);
2162
2163
/**
 * Common worker for SSE2 and MMX instructions on the forms:
 *      pxxxx xmm1, xmm2/mem128
 *      pxxxx mm1, mm2/mem64
 *
 * The 2nd operand is the second half of a register, which in the memory case
 * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
 * where it may read the full 128 bits or only the upper 64 bits.
 *
 * Exceptions type 4.
 *
 * @param   pImpl   Table with the U128 (SSE) and U64 (MMX) workers; a NULL
 *                  U64 worker marks the MMX form invalid.
 */
FNIEMOP_DEF_1(iemOpCommonMmxSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Operand-size prefix selects the SSE form, no prefix the MMX form;
       any repeat prefix makes the encoding invalid. */
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE */
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(2, 0);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_ARG(uint128_t const *, pSrc, 1);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_LOCAL(uint128_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc); /* Most CPUs probably only read the high qword. */

                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            if (!pImpl->pfnU64) /* instructions without an MMX form (e.g. punpckhqdq) */
                return IEMOP_RAISE_INVALID_OPCODE();
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
                /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(2, 0);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_ARG(uint64_t const *, pSrc, 1);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
                IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_LOCAL(uint64_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_FETCH_MEM_U64(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);

                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
2270
2271
/** Opcode 0x0f 0x68 - punpckhbw (MMX and SSE2 forms). */
FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qq__punpckhbw_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpckhbw");
    /* Prefix decoding and operand handling live in the shared high-half worker. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
}
2278
2279
/** Opcode 0x0f 0x69 - punpckhwd (MMX and SSE2 forms). */
FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qd__punpckhwd_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpckhwd");
    /* Prefix decoding and operand handling live in the shared high-half worker. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
}
2286
2287
/** Opcode 0x0f 0x6a - punpckhdq (MMX and SSE2 forms). */
FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qd__punpckhdq_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpckhdq");
    /* Prefix decoding and operand handling live in the shared high-half worker. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
}
2294
/** Opcode 0x0f 0x6b - packssdw; unimplemented stub. */
FNIEMOP_STUB(iemOp_packssdw_Pq_Qd__packssdq_Vdq_Wdq);
2297
2298
/** Opcode 0x0f 0x6c - punpcklqdq (SSE2 only; the shared worker rejects the
 *  no-prefix MMX form because g_iemAImpl_punpcklqdq has no U64 worker). */
FNIEMOP_DEF(iemOp_punpcklqdq_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpcklqdq");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_LowLow_To_Full, &g_iemAImpl_punpcklqdq);
}
2305
2306
/** Opcode 0x0f 0x6d - punpckhqdq (SSE2 only; the shared worker rejects the
 *  no-prefix MMX form because g_iemAImpl_punpckhqdq has no U64 worker). */
FNIEMOP_DEF(iemOp_punpckhqdq_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpckhqdq");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhqdq);
}
2313
2314
/** Opcode 0x0f 0x6e - movd/movq to MMX (no prefix) or XMM (0x66 prefix)
 *  from a general register or memory.  REX.W selects the 64-bit (movq)
 *  form.  The XMM destination is zero-extended to the full 128 bits. */
FNIEMOP_DEF(iemOp_movd_q_Pd_Ey__movd_q_Vy_Ey)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE */
            IEMOP_MNEMONIC("movd/q Wd/q,Ed/q");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* XMM, greg*/
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                    IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                    IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* XMM, [mem] */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                    IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                    IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            IEMOP_MNEMONIC("movq/d Pd/q,Ed/q");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* MMX, greg */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                    IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                else
                    IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* MMX, [mem] */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                    IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                    IEM_MC_STORE_MREG_U32_ZX_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
2415
2416
/** Opcode 0x0f 0x6f - movq (MMX, no prefix), movdqa (0x66 prefix, aligned)
 *  or movdqu (0xf3 prefix, unaligned) register/memory load. */
FNIEMOP_DEF(iemOp_movq_Pq_Qq__movdqa_Vdq_Wdq__movdqu_Vdq_Wdq)
{
    bool fAligned = false;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE aligned */
            fAligned = true;
            /* fall thru - movdqa and movdqu share everything but the alignment check. */
        case IEM_OP_PRF_REPZ: /* SSE unaligned */
            if (fAligned)
                IEMOP_MNEMONIC("movdqa Vdq,Wdq");
            else
                IEMOP_MNEMONIC("movdqu Vdq,Wdq");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint128_t, u128Tmp);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_FETCH_XREG_U128(u128Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u128Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint128_t, u128Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                if (fAligned) /* movdqa faults on unaligned access, movdqu does not */
                    IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                else
                    IEM_MC_FETCH_MEM_U128(u128Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u128Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            IEMOP_MNEMONIC("movq Pq,Qq");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
                /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_FETCH_MREG_U64(u64Tmp, bRm & X86_MODRM_RM_MASK);
                IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
2510
2511
/** Opcode 0x0f 0x70.  The immediate here is evil!
 *  It is "evil" because a ModR/M byte with a memory operand is followed by
 *  the displacement AND THEN the shuffle-control immediate, so the immediate
 *  must be fetched after IEM_MC_CALC_RM_EFF_ADDR in the memory forms below. */
FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib__pshufd_Vdq_Wdq_Ib__pshufhw_Vdq_Wdq_Ib__pshuflq_Vdq_Wdq_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        /* All three SSE prefixes take the same XMM code path; the inner
           switch below picks the specific assembly worker. */
        case IEM_OP_PRF_SIZE_OP: /* SSE */
        case IEM_OP_PRF_REPNZ: /* SSE */
        case IEM_OP_PRF_REPZ: /* SSE */
        {
            PFNIEMAIMPLMEDIAPSHUF pfnAImpl;
            switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
            {
                case IEM_OP_PRF_SIZE_OP:
                    IEMOP_MNEMONIC("pshufd Vdq,Wdq,Ib");
                    pfnAImpl = iemAImpl_pshufd;
                    break;
                case IEM_OP_PRF_REPNZ:
                    IEMOP_MNEMONIC("pshuflw Vdq,Wdq,Ib");
                    pfnAImpl = iemAImpl_pshuflw;
                    break;
                case IEM_OP_PRF_REPZ:
                    IEMOP_MNEMONIC("pshufhw Vdq,Wdq,Ib");
                    pfnAImpl = iemAImpl_pshufhw;
                    break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_ARG(uint128_t const *, pSrc, 1);
                IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_CALL_SSE_AIMPL_3(pfnAImpl, pDst, pSrc, bEvilArg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_LOCAL(uint128_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                /* The immediate follows the displacement, so it is fetched here. */
                uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
                IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();

                IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_SSE_AIMPL_3(pfnAImpl, pDst, pSrc, bEvilArg);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }

        case 0: /* MMX Extension */
            IEMOP_MNEMONIC("pshufw Pq,Qq,Ib");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_ARG(uint64_t const *, pSrc, 1);
                IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
                IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_LOCAL(uint64_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                /* The immediate follows the displacement, so it is fetched here. */
                uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
                IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();

                IEM_MC_FETCH_MEM_U64(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
2636
2637
/* Group 12 (0x0f 0x71) word-shift-by-immediate workers - unimplemented stubs. */

/** Opcode 0x0f 0x71 11/2. */
FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x71 11/2. */
FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Udq_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x71 11/4. */
FNIEMOP_STUB_1(iemOp_Grp12_psraw_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x71 11/4. */
FNIEMOP_STUB_1(iemOp_Grp12_psraw_Udq_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x71 11/6. */
FNIEMOP_STUB_1(iemOp_Grp12_psllw_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x71 11/6. */
FNIEMOP_STUB_1(iemOp_Grp12_psllw_Udq_Ib, uint8_t, bRm);
2655
2656
/** Opcode 0x0f 0x71 - group 12: psrlw/psraw/psllw by immediate.
 *  Dispatches on the ModR/M reg field (/2, /4, /6) and on the operand-size
 *  prefix (MMX vs SSE form).  Only register operands (mod == 3) are valid. */
FNIEMOP_DEF(iemOp_Grp12)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE();
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: case 1: case 3: case 5: case 7:
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2: /* psrlw */
            switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case 0:                     return FNIEMOP_CALL_1(iemOp_Grp12_psrlw_Nq_Ib, bRm);
                case IEM_OP_PRF_SIZE_OP:    return FNIEMOP_CALL_1(iemOp_Grp12_psrlw_Udq_Ib, bRm);
                default:                    return IEMOP_RAISE_INVALID_OPCODE();
            }
        case 4: /* psraw */
            switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case 0:                     return FNIEMOP_CALL_1(iemOp_Grp12_psraw_Nq_Ib, bRm);
                case IEM_OP_PRF_SIZE_OP:    return FNIEMOP_CALL_1(iemOp_Grp12_psraw_Udq_Ib, bRm);
                default:                    return IEMOP_RAISE_INVALID_OPCODE();
            }
        case 6: /* psllw */
            switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case 0:                     return FNIEMOP_CALL_1(iemOp_Grp12_psllw_Nq_Ib, bRm);
                case IEM_OP_PRF_SIZE_OP:    return FNIEMOP_CALL_1(iemOp_Grp12_psllw_Udq_Ib, bRm);
                default:                    return IEMOP_RAISE_INVALID_OPCODE();
            }
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2691
2692
/* Group 13 (0x0f 0x72) dword-shift-by-immediate workers - unimplemented stubs. */

/** Opcode 0x0f 0x72 11/2. */
FNIEMOP_STUB_1(iemOp_Grp13_psrld_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x72 11/2. */
FNIEMOP_STUB_1(iemOp_Grp13_psrld_Udq_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x72 11/4. */
FNIEMOP_STUB_1(iemOp_Grp13_psrad_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x72 11/4. */
FNIEMOP_STUB_1(iemOp_Grp13_psrad_Udq_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x72 11/6. */
FNIEMOP_STUB_1(iemOp_Grp13_pslld_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x72 11/6. */
FNIEMOP_STUB_1(iemOp_Grp13_pslld_Udq_Ib, uint8_t, bRm);
2710
2711
/** Opcode 0x0f 0x72 - group 13: psrld/psrad/pslld by immediate.
 *  Dispatches on the ModR/M reg field (/2, /4, /6) and on the operand-size
 *  prefix (MMX vs SSE form).  Only register operands (mod == 3) are valid. */
FNIEMOP_DEF(iemOp_Grp13)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE();
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: case 1: case 3: case 5: case 7:
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2: /* psrld */
            switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case 0:                     return FNIEMOP_CALL_1(iemOp_Grp13_psrld_Nq_Ib, bRm);
                case IEM_OP_PRF_SIZE_OP:    return FNIEMOP_CALL_1(iemOp_Grp13_psrld_Udq_Ib, bRm);
                default:                    return IEMOP_RAISE_INVALID_OPCODE();
            }
        case 4: /* psrad */
            switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case 0:                     return FNIEMOP_CALL_1(iemOp_Grp13_psrad_Nq_Ib, bRm);
                case IEM_OP_PRF_SIZE_OP:    return FNIEMOP_CALL_1(iemOp_Grp13_psrad_Udq_Ib, bRm);
                default:                    return IEMOP_RAISE_INVALID_OPCODE();
            }
        case 6: /* pslld */
            switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case 0:                     return FNIEMOP_CALL_1(iemOp_Grp13_pslld_Nq_Ib, bRm);
                case IEM_OP_PRF_SIZE_OP:    return FNIEMOP_CALL_1(iemOp_Grp13_pslld_Udq_Ib, bRm);
                default:                    return IEMOP_RAISE_INVALID_OPCODE();
            }
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2746
2747
/* Group 14 (0x0f 0x73) qword/dqword-shift-by-immediate workers - unimplemented
   stubs.  The byte-shift forms (psrldq/pslldq) exist only with the 0x66
   prefix; the ones tagged NEXT are queued for implementation. */

/** Opcode 0x0f 0x73 11/2. */
FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x73 11/2. */
FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Udq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x73 11/3. */
FNIEMOP_STUB_1(iemOp_Grp14_psrldq_Udq_Ib, uint8_t, bRm); //NEXT

/** Opcode 0x0f 0x73 11/6. */
FNIEMOP_STUB_1(iemOp_Grp14_psllq_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x73 11/6. */
FNIEMOP_STUB_1(iemOp_Grp14_psllq_Udq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x73 11/7. */
FNIEMOP_STUB_1(iemOp_Grp14_pslldq_Udq_Ib, uint8_t, bRm); //NEXT
2765
2766
/** Opcode 0x0f 0x73 - group 14: psrlq/psrldq/psllq/pslldq by immediate.
 *  Dispatches on the ModR/M reg field (/2, /3, /6, /7) and the operand-size
 *  prefix; /3 and /7 (byte shifts) only exist in the SSE (0x66) form.
 *  Only register operands (mod == 3) are valid. */
FNIEMOP_DEF(iemOp_Grp14)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE();
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: case 1: case 4: case 5:
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2: /* psrlq */
            switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case 0:                     return FNIEMOP_CALL_1(iemOp_Grp14_psrlq_Nq_Ib, bRm);
                case IEM_OP_PRF_SIZE_OP:    return FNIEMOP_CALL_1(iemOp_Grp14_psrlq_Udq_Ib, bRm);
                default:                    return IEMOP_RAISE_INVALID_OPCODE();
            }
        case 3: /* psrldq - SSE only */
            switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case IEM_OP_PRF_SIZE_OP:    return FNIEMOP_CALL_1(iemOp_Grp14_psrldq_Udq_Ib, bRm);
                default:                    return IEMOP_RAISE_INVALID_OPCODE();
            }
        case 6: /* psllq */
            switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case 0:                     return FNIEMOP_CALL_1(iemOp_Grp14_psllq_Nq_Ib, bRm);
                case IEM_OP_PRF_SIZE_OP:    return FNIEMOP_CALL_1(iemOp_Grp14_psllq_Udq_Ib, bRm);
                default:                    return IEMOP_RAISE_INVALID_OPCODE();
            }
        case 7: /* pslldq - SSE only */
            switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case IEM_OP_PRF_SIZE_OP:    return FNIEMOP_CALL_1(iemOp_Grp14_pslldq_Udq_Ib, bRm);
                default:                    return IEMOP_RAISE_INVALID_OPCODE();
            }
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2806
2807
/**
 * Common worker for SSE2 and MMX instructions on the forms:
 *      pxxx    mm1, mm2/mem64
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SSE2 and MMX cpuid checks.
 *
 * @param   pImpl   Table with the U128 (SSE2) and U64 (MMX) workers; both
 *                  forms are assumed present here (no NULL check like the
 *                  half-register workers have).
 */
FNIEMOP_DEF_1(iemOpCommonMmxSse2_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Operand-size prefix selects the SSE2 form, no prefix the MMX form;
       any repeat prefix makes the encoding invalid. */
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE */
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(2, 0);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_ARG(uint128_t const *, pSrc, 1);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_LOCAL(uint128_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);

                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
                /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(2, 0);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_ARG(uint64_t const *, pSrc, 1);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
                IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_LOCAL(uint64_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_FETCH_MEM_U64(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);

                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
2909
2910
/** Opcode 0x0f 0x74 - pcmpeqb (MMX and SSE2 forms). */
FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq__pcmpeqb_Vdq_Wdq)
{
    IEMOP_MNEMONIC("pcmpeqb");
    /* Prefix decoding and operand handling live in the shared full-width worker. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
}
2917
2918
/** Opcode 0x0f 0x75 - pcmpeqw (MMX and SSE2 forms). */
FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq__pcmpeqw_Vdq_Wdq)
{
    IEMOP_MNEMONIC("pcmpeqw");
    /* Prefix decoding and operand handling live in the shared full-width worker. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
}
2925
2926
/** Opcode 0x0f 0x76 - pcmpeqd (MMX and SSE2 forms).
 *  NOTE(review): the function identifier says "pcmped" (typo for "pcmpeqd");
 *  renaming would require touching the opcode dispatch table outside this
 *  file section, so it is only flagged here. */
FNIEMOP_DEF(iemOp_pcmped_Pq_Qq__pcmpeqd_Vdq_Wdq)
{
    IEMOP_MNEMONIC("pcmpeqd");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
}
2933
2934
/** Opcode 0x0f 0x77 - emms; unimplemented stub. */
FNIEMOP_STUB(iemOp_emms);
/** Opcode 0x0f 0x78 - VMX/AMD group 17; decodes as invalid opcode (UD stub). */
FNIEMOP_UD_STUB(iemOp_vmread_AmdGrp17);
/** Opcode 0x0f 0x79 - vmwrite; decodes as invalid opcode (UD stub). */
FNIEMOP_UD_STUB(iemOp_vmwrite);
/** Opcode 0x0f 0x7c - haddpd/haddps; unimplemented stub. */
FNIEMOP_STUB(iemOp_haddpd_Vdp_Wpd__haddps_Vps_Wps);
/** Opcode 0x0f 0x7d - hsubpd/hsubps; unimplemented stub. */
FNIEMOP_STUB(iemOp_hsubpd_Vpd_Wpd__hsubps_Vps_Wps);
2945
2946
/** Opcode 0x0f 0x7e.
 *
 * MOVD/MOVQ with the register/memory operand as destination:
 *  - 66 prefix (SSE):  movd/movq Ed/q,Vd/q  — low 32/64 bits of an XMM reg.
 *  - no prefix (MMX):  movd/movq Ed/q,Pd/q  — low 32/64 bits of an MMX reg.
 * REX.W selects the 64-bit (movq) form, otherwise 32 bits are moved.
 * Any other prefix combination (F2/F3) is rejected as invalid opcode here;
 * NOTE(review): the F3 form (movq Vq,Wq) named in the function suffix is
 * not handled by this body — presumably routed elsewhere; verify table.
 */
FNIEMOP_DEF(iemOp_movd_q_Ey_Pd__movd_q_Ey_Vy__movq_Vq_Wq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE */
            IEMOP_MNEMONIC("movd/q Ed/q,Wd/q");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* greg, XMM */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    /* REX.W: move the low 64 bits of the XMM reg to the GPR. */
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                    IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Tmp);
                }
                else
                {
                    /* No REX.W: move the low 32 bits. */
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                    IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* [mem], XMM */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                    IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffSrc, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                    IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrEffSrc, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            IEMOP_MNEMONIC("movq/d Ed/q,Pd/q");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* greg, MMX — note: MMX register index takes no REX extension. */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                    IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                    IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* [mem], MMX */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                    IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffSrc, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                    IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrEffSrc, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            /* F2/F3 prefix combinations are invalid for this opcode here. */
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
3053
3054
/** Opcode 0x0f 0x7f.
 *
 * Store forms with the r/m operand as destination:
 *  - 66 prefix: movdqa Wdq,Vdq (aligned 128-bit store).
 *  - F3 prefix: movdqu Wdq,Vdq (unaligned 128-bit store).
 *  - no prefix: movq   Qq,Pq   (64-bit MMX store).
 */
FNIEMOP_DEF(iemOp_movq_Qq_Pq__movq_movdqa_Wdq_Vdq__movdqu_Wdq_Vdq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    bool fAligned = false;
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE aligned */
            fAligned = true;
            /* fall thru - shares the SSE path, only alignment checking differs. */
        case IEM_OP_PRF_REPZ: /* SSE unaligned */
            if (fAligned)
                IEMOP_MNEMONIC("movdqa Wdq,Vdq");
            else
                IEMOP_MNEMONIC("movdqu Wdq,Vdq");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint128_t, u128Tmp);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_XREG_U128((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u128Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint128_t, u128Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                if (fAligned)
                    IEM_MC_STORE_MEM_U128_ALIGN_SSE(pIemCpu->iEffSeg, GCPtrEffSrc, u128Tmp);
                else
                    IEM_MC_STORE_MEM_U128(pIemCpu->iEffSeg, GCPtrEffSrc, u128Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            IEMOP_MNEMONIC("movq Qq,Pq");

            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
                /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_STORE_MREG_U64(bRm & X86_MODRM_RM_MASK, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffSrc, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            /* F2 (and other) prefix combinations are invalid for this opcode. */
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
3149
3150
3151
/** Opcode 0x0f 0x80.
 * jo rel16/32 — near jump taken when OF is set. */
FNIEMOP_DEF(iemOp_jo_Jv)
{
    IEMOP_MNEMONIC("jo Jv");
    IEMOP_HLP_MIN_386();                /* two-byte Jcc requires a 386+. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit (and 64-bit) operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3186
3187
/** Opcode 0x0f 0x81.
 * jno rel16/32 — near jump taken when OF is clear (test inverted below). */
FNIEMOP_DEF(iemOp_jno_Jv)
{
    IEMOP_MNEMONIC("jno Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();           /* OF set: fall through. */
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);     /* OF clear: take the jump. */
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3222
3223
/** Opcode 0x0f 0x82.
 * jc/jb/jnae rel16/32 — near jump taken when CF is set. */
FNIEMOP_DEF(iemOp_jc_Jv)
{
    IEMOP_MNEMONIC("jc/jb/jnae Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3258
3259
/** Opcode 0x0f 0x83.
 * jnc/jnb/jae rel16/32 — near jump taken when CF is clear (test inverted). */
FNIEMOP_DEF(iemOp_jnc_Jv)
{
    IEMOP_MNEMONIC("jnc/jnb/jae Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3294
3295
/** Opcode 0x0f 0x84.
 * je/jz rel16/32 — near jump taken when ZF is set. */
FNIEMOP_DEF(iemOp_je_Jv)
{
    IEMOP_MNEMONIC("je/jz Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3330
3331
/** Opcode 0x0f 0x85.
 * jne/jnz rel16/32 — near jump taken when ZF is clear (test inverted). */
FNIEMOP_DEF(iemOp_jne_Jv)
{
    IEMOP_MNEMONIC("jne/jnz Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3366
3367
/** Opcode 0x0f 0x86.
 * jbe/jna rel16/32 — near jump taken when CF or ZF is set. */
FNIEMOP_DEF(iemOp_jbe_Jv)
{
    IEMOP_MNEMONIC("jbe/jna Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3402
3403
/** Opcode 0x0f 0x87.
 * jnbe/ja rel16/32 — near jump taken when both CF and ZF are clear (inverted). */
FNIEMOP_DEF(iemOp_jnbe_Jv)
{
    IEMOP_MNEMONIC("jnbe/ja Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3438
3439
/** Opcode 0x0f 0x88.
 * js rel16/32 — near jump taken when SF is set. */
FNIEMOP_DEF(iemOp_js_Jv)
{
    IEMOP_MNEMONIC("js Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3474
3475
/** Opcode 0x0f 0x89.
 * jns rel16/32 — near jump taken when SF is clear (test inverted). */
FNIEMOP_DEF(iemOp_jns_Jv)
{
    IEMOP_MNEMONIC("jns Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3510
3511
/** Opcode 0x0f 0x8a.
 * jp/jpe rel16/32 — near jump taken when PF is set. */
FNIEMOP_DEF(iemOp_jp_Jv)
{
    IEMOP_MNEMONIC("jp Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3546
3547
3548/** Opcode 0x0f 0x8b. */
3549FNIEMOP_DEF(iemOp_jnp_Jv)
3550{
3551 IEMOP_MNEMONIC("jo Jv");
3552 IEMOP_HLP_MIN_386();
3553 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3554 if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
3555 {
3556 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3557 IEMOP_HLP_NO_LOCK_PREFIX();
3558
3559 IEM_MC_BEGIN(0, 0);
3560 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3561 IEM_MC_ADVANCE_RIP();
3562 } IEM_MC_ELSE() {
3563 IEM_MC_REL_JMP_S16(i16Imm);
3564 } IEM_MC_ENDIF();
3565 IEM_MC_END();
3566 }
3567 else
3568 {
3569 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3570 IEMOP_HLP_NO_LOCK_PREFIX();
3571
3572 IEM_MC_BEGIN(0, 0);
3573 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3574 IEM_MC_ADVANCE_RIP();
3575 } IEM_MC_ELSE() {
3576 IEM_MC_REL_JMP_S32(i32Imm);
3577 } IEM_MC_ENDIF();
3578 IEM_MC_END();
3579 }
3580 return VINF_SUCCESS;
3581}
3582
3583
/** Opcode 0x0f 0x8c.
 * jl/jnge rel16/32 — near jump taken when SF != OF. */
FNIEMOP_DEF(iemOp_jl_Jv)
{
    IEMOP_MNEMONIC("jl/jnge Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3618
3619
/** Opcode 0x0f 0x8d.
 * jnl/jge rel16/32 — near jump taken when SF == OF (test inverted). */
FNIEMOP_DEF(iemOp_jnl_Jv)
{
    IEMOP_MNEMONIC("jnl/jge Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3654
3655
/** Opcode 0x0f 0x8e.
 * jle/jng rel16/32 — near jump taken when ZF is set or SF != OF. */
FNIEMOP_DEF(iemOp_jle_Jv)
{
    IEMOP_MNEMONIC("jle/jng Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3690
3691
/** Opcode 0x0f 0x8f.
 * jnle/jg rel16/32 — near jump taken when ZF is clear and SF == OF (inverted). */
FNIEMOP_DEF(iemOp_jnle_Jv)
{
    IEMOP_MNEMONIC("jnle/jg Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3726
3727
/** Opcode 0x0f 0x90.
 * seto r/m8 — stores 1 to the byte operand when OF is set, else 0. */
FNIEMOP_DEF(iemOp_seto_Eb)
{
    IEMOP_MNEMONIC("seto Eb");
    IEMOP_HLP_MIN_386();                /* SETcc requires a 386+. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3767
3768
/** Opcode 0x0f 0x91.
 * setno r/m8 — stores 1 when OF is clear, else 0 (condition inverted below). */
FNIEMOP_DEF(iemOp_setno_Eb)
{
    IEMOP_MNEMONIC("setno Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3808
3809
/** Opcode 0x0f 0x92.
 * setc/setb/setnae r/m8 — stores 1 when CF is set, else 0. */
FNIEMOP_DEF(iemOp_setc_Eb)
{
    IEMOP_MNEMONIC("setc Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3849
3850
/** Opcode 0x0f 0x93.
 * setnc/setnb/setae r/m8 — stores 1 when CF is clear, else 0 (inverted). */
FNIEMOP_DEF(iemOp_setnc_Eb)
{
    IEMOP_MNEMONIC("setnc Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3890
3891
/** Opcode 0x0f 0x94.
 * sete/setz r/m8 — stores 1 when ZF is set, else 0. */
FNIEMOP_DEF(iemOp_sete_Eb)
{
    IEMOP_MNEMONIC("sete Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3931
3932
/** Opcode 0x0f 0x95.
 * setne/setnz r/m8 — stores 1 when ZF is clear, else 0 (inverted). */
FNIEMOP_DEF(iemOp_setne_Eb)
{
    IEMOP_MNEMONIC("setne Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3972
3973
/** Opcode 0x0f 0x96.
 * setbe/setna r/m8 — stores 1 when CF or ZF is set, else 0. */
FNIEMOP_DEF(iemOp_setbe_Eb)
{
    IEMOP_MNEMONIC("setbe Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4013
4014
/** Opcode 0x0f 0x97.
 * setnbe/seta r/m8 — stores 1 when both CF and ZF are clear, else 0 (inverted). */
FNIEMOP_DEF(iemOp_setnbe_Eb)
{
    IEMOP_MNEMONIC("setnbe Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4054
4055
/** Opcode 0x0f 0x98.
 * sets r/m8 — stores 1 when SF is set, else 0. */
FNIEMOP_DEF(iemOp_sets_Eb)
{
    IEMOP_MNEMONIC("sets Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4095
4096
/** Opcode 0x0f 0x99.
 * setns r/m8 — stores 1 when SF is clear, else 0 (inverted). */
FNIEMOP_DEF(iemOp_setns_Eb)
{
    IEMOP_MNEMONIC("setns Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4136
4137
4138/** Opcode 0x0f 0x9a. */
4139FNIEMOP_DEF(iemOp_setp_Eb)
4140{
4141 IEMOP_MNEMONIC("setnp Eb");
4142 IEMOP_HLP_MIN_386();
4143 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4144 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */
4145
4146 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4147 * any way. AMD says it's "unused", whatever that means. We're
4148 * ignoring for now. */
4149 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4150 {
4151 /* register target */
4152 IEM_MC_BEGIN(0, 0);
4153 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4154 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
4155 } IEM_MC_ELSE() {
4156 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
4157 } IEM_MC_ENDIF();
4158 IEM_MC_ADVANCE_RIP();
4159 IEM_MC_END();
4160 }
4161 else
4162 {
4163 /* memory target */
4164 IEM_MC_BEGIN(0, 1);
4165 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4166 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4167 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4168 IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
4169 } IEM_MC_ELSE() {
4170 IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
4171 } IEM_MC_ENDIF();
4172 IEM_MC_ADVANCE_RIP();
4173 IEM_MC_END();
4174 }
4175 return VINF_SUCCESS;
4176}
4177
4178
/**
 * Opcode 0x0f 0x9b - setnp Eb.
 *
 * Stores 0 in the byte-sized ModR/M destination (register or memory) when
 * EFLAGS.PF is set, 1 otherwise (i.e. 1 when parity odd).  386+ instruction.
 */
FNIEMOP_DEF(iemOp_setnp_Eb)
{
    IEMOP_MNEMONIC("setnp Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4218
4219
/**
 * Opcode 0x0f 0x9c - setl Eb.
 *
 * Stores 1 in the byte-sized ModR/M destination (register or memory) when
 * EFLAGS.SF != EFLAGS.OF (signed less-than), 0 otherwise.  386+ instruction.
 */
FNIEMOP_DEF(iemOp_setl_Eb)
{
    IEMOP_MNEMONIC("setl Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4259
4260
/**
 * Opcode 0x0f 0x9d - setnl Eb.
 *
 * Stores 0 in the byte-sized ModR/M destination (register or memory) when
 * EFLAGS.SF != EFLAGS.OF, 1 otherwise (signed greater-or-equal).
 * 386+ instruction.
 */
FNIEMOP_DEF(iemOp_setnl_Eb)
{
    IEMOP_MNEMONIC("setnl Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4300
4301
/**
 * Opcode 0x0f 0x9e - setle Eb.
 *
 * Stores 1 in the byte-sized ModR/M destination (register or memory) when
 * EFLAGS.ZF is set or EFLAGS.SF != EFLAGS.OF (signed less-or-equal),
 * 0 otherwise.  386+ instruction.
 */
FNIEMOP_DEF(iemOp_setle_Eb)
{
    IEMOP_MNEMONIC("setle Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4341
4342
/**
 * Opcode 0x0f 0x9f - setnle Eb.
 *
 * Stores 0 in the byte-sized ModR/M destination (register or memory) when
 * EFLAGS.ZF is set or EFLAGS.SF != EFLAGS.OF, 1 otherwise (signed
 * greater-than).  386+ instruction.
 */
FNIEMOP_DEF(iemOp_setnle_Eb)
{
    IEMOP_MNEMONIC("setnle Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4382
4383
/**
 * Common 'push segment-register' helper.
 *
 * Rejects the lock prefix, forbids 64-bit mode for ES/CS/SS/DS (only FS/GS
 * pushes are valid there - see the iReg < X86_SREG_FS check), and pushes the
 * selector value at the effective operand size (defaulting to 64-bit
 * operand size in long mode).
 *
 * @param   iReg    The segment register index (X86_SREG_XXX).
 */
FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (iReg < X86_SREG_FS)
        IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Value);
            IEM_MC_FETCH_SREG_U16(u16Value, iReg);
            IEM_MC_PUSH_U16(u16Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
            /* NOTE(review): dedicated SREG push variant rather than plain
               IEM_MC_PUSH_U32 - presumably models the special 32-bit segment
               push behaviour; see the macro implementation for details. */
            IEM_MC_PUSH_U32_SREG(u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
            IEM_MC_PUSH_U64(u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
4426
4427
/** Opcode 0x0f 0xa0 - push fs (386+); defers to the common SReg push helper. */
FNIEMOP_DEF(iemOp_push_fs)
{
    IEMOP_MNEMONIC("push fs");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_NO_LOCK_PREFIX();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
}
4436
4437
/** Opcode 0x0f 0xa1 - pop fs (386+); defers to the C implementation. */
FNIEMOP_DEF(iemOp_pop_fs)
{
    IEMOP_MNEMONIC("pop fs");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pIemCpu->enmEffOpSize);
}
4446
4447
/** Opcode 0x0f 0xa2 - cpuid; defers to the C implementation. */
FNIEMOP_DEF(iemOp_cpuid)
{
    IEMOP_MNEMONIC("cpuid");
    IEMOP_HLP_MIN_486(); /* not all 486es. */
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
}
4456
4457
4458/**
4459 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
4460 * iemOp_bts_Ev_Gv.
4461 */
4462FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
4463{
4464 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4465 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
4466
4467 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4468 {
4469 /* register destination. */
4470 IEMOP_HLP_NO_LOCK_PREFIX();
4471 switch (pIemCpu->enmEffOpSize)
4472 {
4473 case IEMMODE_16BIT:
4474 IEM_MC_BEGIN(3, 0);
4475 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
4476 IEM_MC_ARG(uint16_t, u16Src, 1);
4477 IEM_MC_ARG(uint32_t *, pEFlags, 2);
4478
4479 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4480 IEM_MC_AND_LOCAL_U16(u16Src, 0xf);
4481 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
4482 IEM_MC_REF_EFLAGS(pEFlags);
4483 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
4484
4485 IEM_MC_ADVANCE_RIP();
4486 IEM_MC_END();
4487 return VINF_SUCCESS;
4488
4489 case IEMMODE_32BIT:
4490 IEM_MC_BEGIN(3, 0);
4491 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
4492 IEM_MC_ARG(uint32_t, u32Src, 1);
4493 IEM_MC_ARG(uint32_t *, pEFlags, 2);
4494
4495 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4496 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
4497 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
4498 IEM_MC_REF_EFLAGS(pEFlags);
4499 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
4500
4501 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
4502 IEM_MC_ADVANCE_RIP();
4503 IEM_MC_END();
4504 return VINF_SUCCESS;
4505
4506 case IEMMODE_64BIT:
4507 IEM_MC_BEGIN(3, 0);
4508 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
4509 IEM_MC_ARG(uint64_t, u64Src, 1);
4510 IEM_MC_ARG(uint32_t *, pEFlags, 2);
4511
4512 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4513 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
4514 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
4515 IEM_MC_REF_EFLAGS(pEFlags);
4516 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
4517
4518 IEM_MC_ADVANCE_RIP();
4519 IEM_MC_END();
4520 return VINF_SUCCESS;
4521
4522 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4523 }
4524 }
4525 else
4526 {
4527 /* memory destination. */
4528
4529 uint32_t fAccess;
4530 if (pImpl->pfnLockedU16)
4531 fAccess = IEM_ACCESS_DATA_RW;
4532 else /* BT */
4533 {
4534 IEMOP_HLP_NO_LOCK_PREFIX();
4535 fAccess = IEM_ACCESS_DATA_R;
4536 }
4537
4538 NOREF(fAccess);
4539
4540 /** @todo test negative bit offsets! */
4541 switch (pIemCpu->enmEffOpSize)
4542 {
4543 case IEMMODE_16BIT:
4544 IEM_MC_BEGIN(3, 2);
4545 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
4546 IEM_MC_ARG(uint16_t, u16Src, 1);
4547 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
4548 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4549 IEM_MC_LOCAL(int16_t, i16AddrAdj);
4550
4551 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4552 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4553 IEM_MC_ASSIGN(i16AddrAdj, u16Src);
4554 IEM_MC_AND_ARG_U16(u16Src, 0x0f);
4555 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
4556 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 1);
4557 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
4558 IEM_MC_FETCH_EFLAGS(EFlags);
4559
4560 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
4561 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
4562 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
4563 else
4564 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
4565 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
4566
4567 IEM_MC_COMMIT_EFLAGS(EFlags);
4568 IEM_MC_ADVANCE_RIP();
4569 IEM_MC_END();
4570 return VINF_SUCCESS;
4571
4572 case IEMMODE_32BIT:
4573 IEM_MC_BEGIN(3, 2);
4574 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
4575 IEM_MC_ARG(uint32_t, u32Src, 1);
4576 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
4577 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4578 IEM_MC_LOCAL(int32_t, i32AddrAdj);
4579
4580 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4581 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4582 IEM_MC_ASSIGN(i32AddrAdj, u32Src);
4583 IEM_MC_AND_ARG_U32(u32Src, 0x1f);
4584 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
4585 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
4586 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
4587 IEM_MC_FETCH_EFLAGS(EFlags);
4588
4589 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
4590 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
4591 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
4592 else
4593 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
4594 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
4595
4596 IEM_MC_COMMIT_EFLAGS(EFlags);
4597 IEM_MC_ADVANCE_RIP();
4598 IEM_MC_END();
4599 return VINF_SUCCESS;
4600
4601 case IEMMODE_64BIT:
4602 IEM_MC_BEGIN(3, 2);
4603 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
4604 IEM_MC_ARG(uint64_t, u64Src, 1);
4605 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
4606 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4607 IEM_MC_LOCAL(int64_t, i64AddrAdj);
4608
4609 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4610 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4611 IEM_MC_ASSIGN(i64AddrAdj, u64Src);
4612 IEM_MC_AND_ARG_U64(u64Src, 0x3f);
4613 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
4614 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
4615 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
4616 IEM_MC_FETCH_EFLAGS(EFlags);
4617
4618 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
4619 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
4620 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
4621 else
4622 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
4623 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
4624
4625 IEM_MC_COMMIT_EFLAGS(EFlags);
4626 IEM_MC_ADVANCE_RIP();
4627 IEM_MC_END();
4628 return VINF_SUCCESS;
4629
4630 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4631 }
4632 }
4633}
4634
4635
4636/** Opcode 0x0f 0xa3. */
4637FNIEMOP_DEF(iemOp_bt_Ev_Gv)
4638{
4639 IEMOP_MNEMONIC("bt Gv,Gv");
4640 IEMOP_HLP_MIN_386();
4641 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
4642}
4643
4644
/**
 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
 *
 * Decodes ModR/M, then the trailing immediate shift-count byte, and invokes
 * the size-specific worker with (dst, Gv source, count, eflags).  Note the
 * 1 passed to IEM_MC_CALC_RM_EFF_ADDR in the memory path: it accounts for
 * the immediate byte that follows the addressing bytes.  AF and OF are
 * declared undefined for the verifier.
 *
 * @param   pImpl   The shld/shrd worker function table.
 */
FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register destination: the immediate follows immediately. */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEMOP_HLP_NO_LOCK_PREFIX();

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                0);
                IEM_MC_ARG(uint16_t,        u16Src,                 1);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, /*=*/cShift, 2);
                IEM_MC_ARG(uint32_t *,      pEFlags,                3);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                0);
                IEM_MC_ARG(uint32_t,        u32Src,                 1);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, /*=*/cShift, 2);
                IEM_MC_ARG(uint32_t *,      pEFlags,                3);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                0);
                IEM_MC_ARG(uint64_t,        u64Src,                 1);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, /*=*/cShift, 2);
                IEM_MC_ARG(uint32_t *,      pEFlags,                3);

                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory destination: effective address first, then the immediate. */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint16_t *,              pu16Dst,                0);
                IEM_MC_ARG(uint16_t,                u16Src,                 1);
                IEM_MC_ARG(uint8_t,                 cShiftArg,              2);
                IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags,        3);
                IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = imm8 follows */
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint32_t *,              pu32Dst,                0);
                IEM_MC_ARG(uint32_t,                u32Src,                 1);
                IEM_MC_ARG(uint8_t,                 cShiftArg,              2);
                IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags,        3);
                IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = imm8 follows */
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint64_t *,              pu64Dst,                0);
                IEM_MC_ARG(uint64_t,                u64Src,                 1);
                IEM_MC_ARG(uint8_t,                 cShiftArg,              2);
                IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags,        3);
                IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = imm8 follows */
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
4789
4790
/**
 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
 *
 * Like the Ib variant, but the shift count is read from CL at execution
 * time instead of from an immediate byte.  AF and OF are declared
 * undefined for the verifier.
 *
 * @param   pImpl   The shld/shrd worker function table.
 */
FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register destination */
        IEMOP_HLP_NO_LOCK_PREFIX();

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                0);
                IEM_MC_ARG(uint16_t,        u16Src,                 1);
                IEM_MC_ARG(uint8_t,         cShiftArg,              2);
                IEM_MC_ARG(uint32_t *,      pEFlags,                3);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                0);
                IEM_MC_ARG(uint32_t,        u32Src,                 1);
                IEM_MC_ARG(uint8_t,         cShiftArg,              2);
                IEM_MC_ARG(uint32_t *,      pEFlags,                3);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                0);
                IEM_MC_ARG(uint64_t,        u64Src,                 1);
                IEM_MC_ARG(uint8_t,         cShiftArg,              2);
                IEM_MC_ARG(uint32_t *,      pEFlags,                3);

                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory destination */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint16_t *,              pu16Dst,                0);
                IEM_MC_ARG(uint16_t,                u16Src,                 1);
                IEM_MC_ARG(uint8_t,                 cShiftArg,              2);
                IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags,        3);
                IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint32_t *,              pu32Dst,                0);
                IEM_MC_ARG(uint32_t,                u32Src,                 1);
                IEM_MC_ARG(uint8_t,                 cShiftArg,              2);
                IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags,        3);
                IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint64_t *,              pu64Dst,                0);
                IEM_MC_ARG(uint64_t,                u64Src,                 1);
                IEM_MC_ARG(uint8_t,                 cShiftArg,              2);
                IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags,        3);
                IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
4934
4935
4936
/** Opcode 0x0f 0xa4 - shld Ev,Gv,Ib (386+); defers to the common Ib worker. */
FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
{
    IEMOP_MNEMONIC("shld Ev,Gv,Ib");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shld);
}
4944
4945
/** Opcode 0x0f 0xa5 - shld Ev,Gv,CL (386+); defers to the common CL worker. */
FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
{
    IEMOP_MNEMONIC("shld Ev,Gv,CL");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shld);
}
4953
4954
/** Opcode 0x0f 0xa8 - push gs (386+); defers to the common SReg push helper. */
FNIEMOP_DEF(iemOp_push_gs)
{
    IEMOP_MNEMONIC("push gs");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_NO_LOCK_PREFIX();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
}
4963
4964
/** Opcode 0x0f 0xa9 - pop gs (386+); defers to the C implementation. */
FNIEMOP_DEF(iemOp_pop_gs)
{
    IEMOP_MNEMONIC("pop gs");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pIemCpu->enmEffOpSize);
}
4973
4974
/** Opcode 0x0f 0xaa - rsm; not implemented yet (stub). */
FNIEMOP_STUB(iemOp_rsm);
//IEMOP_HLP_MIN_386();  /* min-CPU check to add when the stub is implemented */
4978
4979
/** Opcode 0x0f 0xab - bts Ev,Gv (386+); defers to the common bit-op worker. */
FNIEMOP_DEF(iemOp_bts_Ev_Gv)
{
    IEMOP_MNEMONIC("bts Ev,Gv");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
}
4987
4988
/** Opcode 0x0f 0xac - shrd Ev,Gv,Ib (386+); defers to the common Ib worker. */
FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
{
    IEMOP_MNEMONIC("shrd Ev,Gv,Ib");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shrd);
}
4996
4997
/** Opcode 0x0f 0xad - shrd Ev,Gv,CL (386+); defers to the common CL worker. */
FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
{
    IEMOP_MNEMONIC("shrd Ev,Gv,CL");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shrd);
}
5005
5006
/**
 * Opcode 0x0f 0xae mem/0 - fxsave m512.
 *
 * Raises \#UD when the guest CPU lacks FXSAVE/FXRSTOR support, otherwise
 * computes the effective address and defers to iemCImpl_fxsave.
 */
FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fxsave m512");
    if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fFxSaveRstor)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t,         iEffSeg,                                 0);
    IEM_MC_ARG(RTGCPTR,         GCPtrEff,                                1);
    IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
    IEM_MC_END();
    return VINF_SUCCESS;
}
5025
5026
/**
 * Opcode 0x0f 0xae mem/1 - fxrstor m512.
 *
 * Raises \#UD when the guest CPU lacks FXSAVE/FXRSTOR support, otherwise
 * computes the effective address and defers to iemCImpl_fxrstor.
 */
FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fxrstor m512");
    if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fFxSaveRstor)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t,         iEffSeg,                                 0);
    IEM_MC_ARG(RTGCPTR,         GCPtrEff,                                1);
    IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
    IEM_MC_END();
    return VINF_SUCCESS;
}
5045
5046
/** Opcode 0x0f 0xae mem/2 - ldmxcsr; not implemented yet (stub). */
FNIEMOP_STUB_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/3 - stmxcsr; not implemented yet (stub). */
FNIEMOP_STUB_1(iemOp_Grp15_stmxcsr, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/4 - xsave; decodes to \#UD (stub). */
FNIEMOP_UD_STUB_1(iemOp_Grp15_xsave, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/5 - xrstor; decodes to \#UD (stub). */
FNIEMOP_UD_STUB_1(iemOp_Grp15_xrstor, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/6 - xsaveopt; decodes to \#UD (stub). */
FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/7 - clflush; not implemented yet (stub). */
FNIEMOP_STUB_1(iemOp_Grp15_clflush, uint8_t, bRm);
5064
5065
/**
 * Opcode 0x0f 0xae 11b/5 - lfence.
 *
 * Raises \#UD when the guest lacks SSE2.  Uses the real LFENCE assembly
 * helper when the host CPU has SSE2, otherwise an alternative memory-fence
 * helper.
 */
FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
{
    IEMOP_MNEMONIC("lfence");
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fSse2)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(0, 0);
    if (IEM_GET_HOST_CPU_FEATURES(pIemCpu)->fSse2)
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
5083
5084
/** Opcode 0x0f 0xae 11b/6. (MFENCE - full memory fence) */
FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
{
    IEMOP_MNEMONIC("mfence");
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* MFENCE requires SSE2 in the guest CPU profile; raise #UD otherwise. */
    if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fSse2)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(0, 0);
    /* Use the real host mfence when available, otherwise a generic fence. */
    if (IEM_GET_HOST_CPU_FEATURES(pIemCpu)->fSse2)
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
5102
5103
/** Opcode 0x0f 0xae 11b/7. (SFENCE - store fence) */
FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
{
    IEMOP_MNEMONIC("sfence");
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* NOTE(review): SFENCE is an SSE (not SSE2) instruction on real hardware;
       this checks fSse2 like l/mfence - presumably intentional, confirm. */
    if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fSse2)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(0, 0);
    /* Use the real host sfence when available, otherwise a generic fence. */
    if (IEM_GET_HOST_CPU_FEATURES(pIemCpu)->fSse2)
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
5121
5122
/** Opcode 0xf3 0x0f 0xae 11b/0. (RDFSBASE - decodes to \#UD for now) */
FNIEMOP_UD_STUB_1(iemOp_Grp15_rdfsbase, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xae 11b/1. (RDGSBASE - decodes to \#UD for now) */
FNIEMOP_UD_STUB_1(iemOp_Grp15_rdgsbase, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xae 11b/2. (WRFSBASE - decodes to \#UD for now) */
FNIEMOP_UD_STUB_1(iemOp_Grp15_wrfsbase, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xae 11b/3. (WRGSBASE - decodes to \#UD for now) */
FNIEMOP_UD_STUB_1(iemOp_Grp15_wrgsbase, uint8_t, bRm);
5134
5135
/**
 * Opcode 0x0f 0xae - Group 15 dispatcher.
 *
 * Memory forms (mod != 3) dispatch on the ModR/M reg field to
 * fxsave/fxrstor/ldmxcsr/stmxcsr/xsave/xrstor/xsaveopt/clflush.
 * Register forms (mod == 3) additionally dispatch on the prefix bytes:
 * no prefix selects the fence instructions (reg 5..7), F3 selects the
 * rd/wr fs/gs base instructions, anything else is \#UD.
 */
FNIEMOP_DEF(iemOp_Grp15)
{
    IEMOP_HLP_MIN_586(); /* Not entirely accurate nor needed, but useful for debugging 286 code. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Memory forms: selected purely by the reg field. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_Grp15_fxsave, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_Grp15_fxrstor, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_Grp15_ldmxcsr, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_Grp15_stmxcsr, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_Grp15_xsave, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_Grp15_xrstor, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_Grp15_xsaveopt,bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_Grp15_clflush, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Register forms: also selected by repeat/size/lock prefixes. */
        switch (pIemCpu->fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_LOCK))
        {
            case 0:
                switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
                {
                    case 0: return IEMOP_RAISE_INVALID_OPCODE();
                    case 1: return IEMOP_RAISE_INVALID_OPCODE();
                    case 2: return IEMOP_RAISE_INVALID_OPCODE();
                    case 3: return IEMOP_RAISE_INVALID_OPCODE();
                    case 4: return IEMOP_RAISE_INVALID_OPCODE();
                    case 5: return FNIEMOP_CALL_1(iemOp_Grp15_lfence, bRm);
                    case 6: return FNIEMOP_CALL_1(iemOp_Grp15_mfence, bRm);
                    case 7: return FNIEMOP_CALL_1(iemOp_Grp15_sfence, bRm);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* unreachable: all cases above return */

            case IEM_OP_PRF_REPZ:
                switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
                {
                    case 0: return FNIEMOP_CALL_1(iemOp_Grp15_rdfsbase, bRm);
                    case 1: return FNIEMOP_CALL_1(iemOp_Grp15_rdgsbase, bRm);
                    case 2: return FNIEMOP_CALL_1(iemOp_Grp15_wrfsbase, bRm);
                    case 3: return FNIEMOP_CALL_1(iemOp_Grp15_wrgsbase, bRm);
                    case 4: return IEMOP_RAISE_INVALID_OPCODE();
                    case 5: return IEMOP_RAISE_INVALID_OPCODE();
                    case 6: return IEMOP_RAISE_INVALID_OPCODE();
                    case 7: return IEMOP_RAISE_INVALID_OPCODE();
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* unreachable: all cases above return */

            default:
                return IEMOP_RAISE_INVALID_OPCODE();
        }
    }
    /* Not reached: every path through the switches above returns. */
}
5195
5196
/** Opcode 0x0f 0xaf. (IMUL Gv,Ev - two operand signed multiply) */
FNIEMOP_DEF(iemOp_imul_Gv_Ev)
{
    IEMOP_MNEMONIC("imul Gv,Ev");
    IEMOP_HLP_MIN_386(); /* two-operand IMUL is a 386+ encoding */
    /* SF/ZF/AF/PF are architecturally undefined after IMUL. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_imul_two);
}
5205
5206
/**
 * Opcode 0x0f 0xb0. (CMPXCHG Eb,Gb - compare AL with r/m8, exchange on match)
 *
 * The assembly helper updates the destination, AL and EFLAGS; a LOCK prefix
 * selects the locked helper variant.
 */
FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
{
    IEMOP_MNEMONIC("cmpxchg Eb,Gb");
    IEMOP_HLP_MIN_486(); /* CMPXCHG first appeared on the 486 */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register destination: operate directly on the guest registers. */
        IEMOP_HLP_DONE_DECODING();
        IEM_MC_BEGIN(4, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t *, pu8Al, 1);
        IEM_MC_ARG(uint8_t, u8Src, 2);
        IEM_MC_ARG(uint32_t *, pEFlags, 3);

        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* Memory destination: map it R/W, run the helper against a local AL
           copy, then commit memory, EFLAGS and the (possibly updated) AL. */
        IEM_MC_BEGIN(4, 3);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t *, pu8Al, 1);
        IEM_MC_ARG(uint8_t, u8Src, 2);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(uint8_t, u8Al);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING();
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_REF_LOCAL(pu8Al, u8Al);
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al); /* write back AL (helper may have loaded dst into it) */
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5265
/**
 * Opcode 0x0f 0xb1. (CMPXCHG Ev,Gv - compare rAX with r/m, exchange on match)
 *
 * Same structure as the byte variant, expanded for the three effective
 * operand sizes.  On 32-bit hosts (RT_ARCH_X86) the 64-bit source is passed
 * by reference since a uint64_t cannot be passed in a register there.
 */
FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
{
    IEMOP_MNEMONIC("cmpxchg Ev,Gv");
    IEMOP_HLP_MIN_486(); /* CMPXCHG first appeared on the 486 */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register destination. */
        IEMOP_HLP_DONE_DECODING();
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Ax, 1);
                IEM_MC_ARG(uint16_t, u16Src, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Eax, 1);
                IEM_MC_ARG(uint32_t, u32Src, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);

                /* 32-bit register writes clear the high halves in long mode. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Rax, 1);
#ifdef RT_ARCH_X86
                IEM_MC_ARG(uint64_t *, pu64Src, 2); /* by reference on 32-bit hosts */
#else
                IEM_MC_ARG(uint64_t, u64Src, 2);
#endif
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
#ifdef RT_ARCH_X86
                IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
#else
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
#endif

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory destination: map R/W, use a local rAX copy, then commit
           memory, EFLAGS and the (possibly updated) accumulator. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Ax, 1);
                IEM_MC_ARG(uint16_t, u16Src, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint16_t, u16Ax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Eax, 1);
                IEM_MC_ARG(uint32_t, u32Src, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint32_t, u32Eax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Rax, 1);
#ifdef RT_ARCH_X86
                IEM_MC_ARG(uint64_t *, pu64Src, 2); /* by reference on 32-bit hosts */
#else
                IEM_MC_ARG(uint64_t, u64Src, 2);
#endif
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint64_t, u64Rax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
#ifdef RT_ARCH_X86
                IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
#else
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
#endif

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
5459
5460
/**
 * Common worker for LSS/LFS/LGS (and friends): loads a far pointer
 * (offset followed by a 16-bit selector) from memory into the given
 * segment register and general register.
 *
 * @param   iSegReg     The segment register to load (X86_SREG_XXX).
 * @param   bRm         The ModR/M byte; must denote a memory operand.
 */
FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
{
    Assert((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)); /* Caller checks this */
    uint8_t const iGReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg;

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            /* m16:16 - 16-bit offset at +0, selector at +2. */
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t, uSel, 0);
            IEM_MC_ARG(uint16_t, offSeg, 1);
            IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
            IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16(offSeg, pIemCpu->iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pIemCpu->iEffSeg, GCPtrEff, 2);
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            /* m16:32 - 32-bit offset at +0, selector at +4. */
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t, uSel, 0);
            IEM_MC_ARG(uint32_t, offSeg, 1);
            IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
            IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32(offSeg, pIemCpu->iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pIemCpu->iEffSeg, GCPtrEff, 4);
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            /* m16:64 - 64-bit offset at +0, selector at +8. */
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t, uSel, 0);
            IEM_MC_ARG(uint64_t, offSeg, 1);
            IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
            IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            if (IEM_IS_GUEST_CPU_AMD(pIemCpu)) /** @todo testcase: rev 3.15 of the amd manuals claims it only loads a 32-bit greg. */
                IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pIemCpu->iEffSeg, GCPtrEff);
            else
                IEM_MC_FETCH_MEM_U64(offSeg, pIemCpu->iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pIemCpu->iEffSeg, GCPtrEff, 8);
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
5522
5523
/** Opcode 0x0f 0xb2. (LSS Gv,Mp - load far pointer into SS:Gv) */
FNIEMOP_DEF(iemOp_lss_Gv_Mp)
{
    IEMOP_MNEMONIC("lss Gv,Mp");
    IEMOP_HLP_MIN_386(); /* LSS is a 386+ instruction */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Register form is invalid - LSS takes a memory operand only. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE();
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
}
5534
5535
5536/** Opcode 0x0f 0xb3. */
5537FNIEMOP_DEF(iemOp_btr_Ev_Gv)
5538{
5539 IEMOP_MNEMONIC("btr Ev,Gv");
5540 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
5541}
5542
5543
/** Opcode 0x0f 0xb4. (LFS Gv,Mp - load far pointer into FS:Gv) */
FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
{
    IEMOP_MNEMONIC("lfs Gv,Mp");
    IEMOP_HLP_MIN_386(); /* LFS is a 386+ instruction */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Register form is invalid - LFS takes a memory operand only. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE();
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
}
5554
5555
/** Opcode 0x0f 0xb5. (LGS Gv,Mp - load far pointer into GS:Gv) */
FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
{
    IEMOP_MNEMONIC("lgs Gv,Mp");
    IEMOP_HLP_MIN_386(); /* LGS is a 386+ instruction */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Register form is invalid - LGS takes a memory operand only. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE();
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
}
5566
5567
/** Opcode 0x0f 0xb6. (MOVZX Gv,Eb - zero-extend byte into word/dword/qword) */
FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
{
    IEMOP_MNEMONIC("movzx Gv,Eb");
    IEMOP_HLP_MIN_386(); /* MOVZX is a 386+ instruction */

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register source: fetch byte with zero extension, store at the
           effective operand size. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
5658
5659
/** Opcode 0x0f 0xb7. (MOVZX Gv,Ew - zero-extend word into dword/qword) */
FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
{
    IEMOP_MNEMONIC("movzx Gv,Ew");
    IEMOP_HLP_MIN_386(); /* MOVZX is a 386+ instruction */

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /** @todo Not entirely sure how the operand size prefix is handled here,
     *        assuming that it will be ignored. Would be nice to have a few
     *        test for this. */
    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register source: only 32-bit and 64-bit destinations make a
           difference; 16-bit operand size is treated as 32-bit here. */
        if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}
5726
5727
/** Opcode 0x0f 0xb8. (POPCNT with F3 prefix / JMPE; not implemented yet) */
FNIEMOP_STUB(iemOp_popcnt_Gv_Ev_jmpe);
5730
5731
/** Opcode 0x0f 0xb9. (Group 10 / UD1 - architecturally reserved, raises \#UD) */
FNIEMOP_DEF(iemOp_Grp10)
{
    Log(("iemOp_Grp10 -> #UD\n"));
    return IEMOP_RAISE_INVALID_OPCODE();
}
5738
5739
/**
 * Opcode 0x0f 0xba - Group 8: BT/BTS/BTR/BTC Ev,Ib.
 *
 * The ModR/M reg field selects the operation (4..7); 0..3 raise \#UD.
 * The bit offset is an immediate byte, masked to the operand width.
 */
FNIEMOP_DEF(iemOp_Grp8)
{
    IEMOP_HLP_MIN_386(); /* all Group 8 forms are 386+ */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPBINSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: case 1: case 2: case 3:
            return IEMOP_RAISE_INVALID_OPCODE();
        case 4: pImpl = &g_iemAImpl_bt;  IEMOP_MNEMONIC("bt  Ev,Ib"); break;
        case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC("bts Ev,Ib"); break;
        case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC("btr Ev,Ib"); break;
        case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC("btc Ev,Ib"); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    /* OF/SF/ZF/AF/PF are architecturally undefined after bit tests. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register destination. */
        uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
        IEMOP_HLP_NO_LOCK_PREFIX(); /* LOCK only valid with a memory destination */

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u8Bit & 0x0f, 1); /* bit offset mod 16 */
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u8Bit & 0x1f, 1); /* bit offset mod 32 */
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /* 32-bit writes clear the high half */
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u8Bit & 0x3f, 1); /* bit offset mod 64 */
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory destination. */

        /* BT only reads; BTS/BTR/BTC need read-write access (and allow LOCK,
           indicated by the presence of a locked helper). */
        uint32_t fAccess;
        if (pImpl->pfnLockedU16)
            fAccess = IEM_ACCESS_DATA_RW;
        else /* BT */
        {
            IEMOP_HLP_NO_LOCK_PREFIX();
            fAccess = IEM_ACCESS_DATA_R;
        }

        /** @todo test negative bit offsets! */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = one immediate byte follows */
                uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
                IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = one immediate byte follows */
                uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
                IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = one immediate byte follows */
                uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
                IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

}
5902
5903
/** Opcode 0x0f 0xbb. (BTC Ev,Gv - bit test and complement) */
FNIEMOP_DEF(iemOp_btc_Ev_Gv)
{
    IEMOP_MNEMONIC("btc Ev,Gv");
    IEMOP_HLP_MIN_386(); /* BTC is a 386+ instruction */
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
}
5911
5912
/** Opcode 0x0f 0xbc. (BSF Gv,Ev - bit scan forward) */
FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
{
    IEMOP_MNEMONIC("bsf Gv,Ev");
    IEMOP_HLP_MIN_386(); /* BSF is a 386+ instruction */
    /* Only ZF is defined after BSF; the rest are undefined. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsf);
}
5921
5922
/** Opcode 0x0f 0xbd. (BSR Gv,Ev - bit scan reverse) */
FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
{
    IEMOP_MNEMONIC("bsr Gv,Ev");
    IEMOP_HLP_MIN_386(); /* BSR is a 386+ instruction */
    /* Only ZF is defined after BSR; the rest are undefined. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsr);
}
5931
5932
/** Opcode 0x0f 0xbe. (MOVSX Gv,Eb - sign-extend byte into word/dword/qword) */
FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
{
    IEMOP_MNEMONIC("movsx Gv,Eb");
    IEMOP_HLP_MIN_386(); /* MOVSX is a 386+ instruction */

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register source: fetch byte with sign extension, store at the
           effective operand size. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6023
6024
/** Opcode 0x0f 0xbf. */
FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
{
    /* MOVSX Gv,Ew: sign-extend a word operand into a 32/64-bit GREG.  With a
       16-bit effective operand size this degenerates to a plain move, which is
       why only the 32-bit and 64-bit paths exist below. */
    IEMOP_MNEMONIC("movsx Gv,Ew");
    IEMOP_HLP_MIN_386(); /* 386+ instruction. */

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /** @todo Not entirely sure how the operand size prefix is handled here,
     *        assuming that it will be ignored. Would be nice to have a few
     *        test for this. */
    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}
6091
6092
/** Opcode 0x0f 0xc0. */
FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
{
    /* XADD Eb,Gb: exchange-and-add; the worker swaps the operands and leaves
       their sum in the destination.  486+ instruction. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_MIN_486();
    IEMOP_MNEMONIC("xadd Eb,Gb");

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX(); /* LOCK is only valid with a memory destination. */

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0);
        IEM_MC_ARG(uint8_t *,  pu8Reg,  1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
        /* The register operand is worked on via a local copy (u8RegCopy) so it
           can be written back to the GREG only after the memory commit. */
        IEM_MC_BEGIN(3, 3);
        IEM_MC_ARG(uint8_t *,  pu8Dst,          0);
        IEM_MC_ARG(uint8_t *,  pu8Reg,          1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(uint8_t,  u8RegCopy);
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U8(u8RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
        IEM_MC_FETCH_EFLAGS(EFlags);
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u8RegCopy);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }
    return VINF_SUCCESS;
}
6151
6152
/** Opcode 0x0f 0xc1. */
FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
{
    /* XADD Ev,Gv: exchange-and-add at the effective operand size (16/32/64).
       486+ instruction. */
    IEMOP_MNEMONIC("xadd Ev,Gv");
    IEMOP_HLP_MIN_486();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX(); /* LOCK is only valid with a memory destination. */

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst,  0);
                IEM_MC_ARG(uint16_t *, pu16Reg,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst,  0);
                IEM_MC_ARG(uint32_t *, pu32Reg,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);

                /* 32-bit GREG writes zero the upper dword in 64-bit mode. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst,  0);
                IEM_MC_ARG(uint64_t *, pu64Reg,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
        /* As in the byte variant: operate on a local copy of the register
           operand and store it back only after the memory commit, picking the
           locked worker when a LOCK prefix is present. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint16_t *, pu16Dst,         0);
                IEM_MC_ARG(uint16_t *, pu16Reg,         1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16RegCopy);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U16(u16RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint32_t *, pu32Dst,         0);
                IEM_MC_ARG(uint32_t *, pu32Reg,         1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32RegCopy);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U32(u32RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint64_t *, pu64Dst,         0);
                IEM_MC_ARG(uint64_t *, pu64Reg,         1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64RegCopy);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U64(u64RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6305
/* Not yet implemented opcodes 0x0f 0xc2..0xc6 (FNIEMOP_STUB presumably
   expands to a not-implemented placeholder body; see the macro definition). */

/** Opcode 0x0f 0xc2. */
FNIEMOP_STUB(iemOp_cmpps_Vps_Wps_Ib__cmppd_Vpd_Wpd_Ib__cmpss_Vss_Wss_Ib__cmpsd_Vsd_Wsd_Ib);

/** Opcode 0x0f 0xc3. */
FNIEMOP_STUB(iemOp_movnti_My_Gy);

/** Opcode 0x0f 0xc4. */
FNIEMOP_STUB(iemOp_pinsrw_Pq_Ry_Mw_Ib__pinsrw_Vdq_Ry_Mw_Ib);

/** Opcode 0x0f 0xc5. */
FNIEMOP_STUB(iemOp_pextrw_Gd_Nq_Ib__pextrw_Gd_Udq_Ib);

/** Opcode 0x0f 0xc6. */
FNIEMOP_STUB(iemOp_shufps_Vps_Wps_Ib__shufdp_Vpd_Wpd_Ib);
6320
6321
/** Opcode 0x0f 0xc7 !11/1. */
FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
{
    /* CMPXCHG8B m64: the assembly worker compares EDX:EAX with the memory
       qword and either stores ECX:EBX there (match) or updates the EDX:EAX
       pair (mismatch); ZF reports the outcome. */
    IEMOP_MNEMONIC("cmpxchg8b Mq");

    IEM_MC_BEGIN(4, 3);
    IEM_MC_ARG(uint64_t *, pu64MemDst,     0);
    IEM_MC_ARG(PRTUINT64U, pu64EaxEdx,     1);
    IEM_MC_ARG(PRTUINT64U, pu64EbxEcx,     2);
    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
    IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
    IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);

    /* Assemble the EDX:EAX comparand from the two 32-bit GREGs. */
    IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
    IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
    IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);

    /* And the ECX:EBX replacement value. */
    IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
    IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
    IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);

    IEM_MC_FETCH_EFLAGS(EFlags);
    if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
    else
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);

    IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
    IEM_MC_COMMIT_EFLAGS(EFlags);
    /* On mismatch (ZF clear) write the (worker updated) comparand back. */
    IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
        /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */
        IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
        IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
6366
6367
/* Group 9 sub-forms not yet implemented; FNIEMOP_UD_STUB_1 presumably raises
   \#UD (invalid opcode) — see the macro definition. */

/** Opcode REX.W 0x0f 0xc7 !11/1. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm);

/** Opcode 0x0f 0xc7 11/6. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm);

/** Opcode 0x0f 0xc7 !11/6. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);

/** Opcode 0x66 0x0f 0xc7 !11/6. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xc7 !11/6. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);

/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
6385
6386
6387/** Opcode 0x0f 0xc7. */
6388FNIEMOP_DEF(iemOp_Grp9)
6389{
6390 /** @todo Testcase: Check mixing 0x66 and 0xf3. Check the effect of 0xf2. */
6391 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6392 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
6393 {
6394 case 0: case 2: case 3: case 4: case 5:
6395 return IEMOP_RAISE_INVALID_OPCODE();
6396 case 1:
6397 /** @todo Testcase: Check prefix effects on cmpxchg8b/16b. */
6398 if ( (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)
6399 || (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))) /** @todo Testcase: AMD seems to express a different idea here wrt prefixes. */
6400 return IEMOP_RAISE_INVALID_OPCODE();
6401 if (bRm & IEM_OP_PRF_SIZE_REX_W)
6402 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
6403 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
6404 case 6:
6405 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6406 return FNIEMOP_CALL_1(iemOp_Grp9_rdrand_Rv, bRm);
6407 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))
6408 {
6409 case 0:
6410 return FNIEMOP_CALL_1(iemOp_Grp9_vmptrld_Mq, bRm);
6411 case IEM_OP_PRF_SIZE_OP:
6412 return FNIEMOP_CALL_1(iemOp_Grp9_vmclear_Mq, bRm);
6413 case IEM_OP_PRF_REPZ:
6414 return FNIEMOP_CALL_1(iemOp_Grp9_vmxon_Mq, bRm);
6415 default:
6416 return IEMOP_RAISE_INVALID_OPCODE();
6417 }
6418 case 7:
6419 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))
6420 {
6421 case 0:
6422 case IEM_OP_PRF_REPZ:
6423 return FNIEMOP_CALL_1(iemOp_Grp9_vmptrst_Mq, bRm);
6424 default:
6425 return IEMOP_RAISE_INVALID_OPCODE();
6426 }
6427 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6428 }
6429}
6430
6431
/**
 * Common 'bswap register' helper.
 */
FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
{
    /* Byte-swaps the given general register in place at the effective
       operand size; iReg already includes any REX.B extension. */
    IEMOP_HLP_NO_LOCK_PREFIX();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            /* NOTE(review): 16-bit BSWAP is documented as undefined; this
               runs the 16-bit worker on a 32-bit reference of the register. */
            IEM_MC_BEGIN(1, 0);
            IEM_MC_ARG(uint32_t *,  pu32Dst, 0);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);     /* Don't clear the high dword! */
            IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(1, 0);
            IEM_MC_ARG(uint32_t *,  pu32Dst, 0);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            /* 32-bit GREG writes zero the upper dword in 64-bit mode. */
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
            IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(1, 0);
            IEM_MC_ARG(uint64_t *,  pu64Dst, 0);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6471
6472
/** Opcode 0x0f 0xc8. */
FNIEMOP_DEF(iemOp_bswap_rAX_r8)
{
    IEMOP_MNEMONIC("bswap rAX/r8");
    /* Note! Intel manuals states that R8-R15 can be accessed by using a REX.X
             prefix.  REX.B is the correct prefix it appears.  For a parallel
             case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
    IEMOP_HLP_MIN_486(); /* BSWAP is a 486+ instruction. */
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pIemCpu->uRexB);
}
6483
6484
/** Opcode 0x0f 0xc9. */
FNIEMOP_DEF(iemOp_bswap_rCX_r9)
{
    IEMOP_MNEMONIC("bswap rCX/r9");
    IEMOP_HLP_MIN_486(); /* BSWAP is a 486+ instruction. */
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pIemCpu->uRexB);
}
6492
6493
6494/** Opcode 0x0f 0xca. */
6495FNIEMOP_DEF(iemOp_bswap_rDX_r10)
6496{
6497 IEMOP_MNEMONIC("bswap rDX/r9");
6498 IEMOP_HLP_MIN_486();
6499 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pIemCpu->uRexB);
6500}
6501
6502
6503/** Opcode 0x0f 0xcb. */
6504FNIEMOP_DEF(iemOp_bswap_rBX_r11)
6505{
6506 IEMOP_MNEMONIC("bswap rBX/r9");
6507 IEMOP_HLP_MIN_486();
6508 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pIemCpu->uRexB);
6509}
6510
6511
/** Opcode 0x0f 0xcc. */
FNIEMOP_DEF(iemOp_bswap_rSP_r12)
{
    IEMOP_MNEMONIC("bswap rSP/r12");
    IEMOP_HLP_MIN_486(); /* BSWAP is a 486+ instruction. */
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pIemCpu->uRexB);
}
6519
6520
/** Opcode 0x0f 0xcd. */
FNIEMOP_DEF(iemOp_bswap_rBP_r13)
{
    IEMOP_MNEMONIC("bswap rBP/r13");
    IEMOP_HLP_MIN_486(); /* BSWAP is a 486+ instruction. */
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pIemCpu->uRexB);
}
6528
6529
/** Opcode 0x0f 0xce. */
FNIEMOP_DEF(iemOp_bswap_rSI_r14)
{
    IEMOP_MNEMONIC("bswap rSI/r14");
    IEMOP_HLP_MIN_486(); /* BSWAP is a 486+ instruction. */
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pIemCpu->uRexB);
}
6537
6538
/** Opcode 0x0f 0xcf. */
FNIEMOP_DEF(iemOp_bswap_rDI_r15)
{
    IEMOP_MNEMONIC("bswap rDI/r15");
    IEMOP_HLP_MIN_486(); /* BSWAP is a 486+ instruction. */
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pIemCpu->uRexB);
}
6546
6547
6548
/* Not yet implemented MMX/SSE opcodes 0x0f 0xd0..0xd6. */

/** Opcode 0x0f 0xd0. */
FNIEMOP_STUB(iemOp_addsubpd_Vpd_Wpd__addsubps_Vps_Wps);
/** Opcode 0x0f 0xd1. */
FNIEMOP_STUB(iemOp_psrlw_Pp_Qp__psrlw_Vdp_Wdq);
/** Opcode 0x0f 0xd2. */
FNIEMOP_STUB(iemOp_psrld_Pq_Qq__psrld_Vdq_Wdq);
/** Opcode 0x0f 0xd3. */
FNIEMOP_STUB(iemOp_psrlq_Pq_Qq__psrlq_Vdq_Wdq);
/** Opcode 0x0f 0xd4. */
FNIEMOP_STUB(iemOp_paddq_Pq_Qq__paddq_Vdq_Wdq);
/** Opcode 0x0f 0xd5. */
FNIEMOP_STUB(iemOp_pmulq_Pq_Qq__pmullw_Vdq_Wdq);
/** Opcode 0x0f 0xd6. */
FNIEMOP_STUB(iemOp_movq_Wq_Vq__movq2dq_Vdq_Nq__movdq2q_Pq_Uq);
6563
6564
6565/** Opcode 0x0f 0xd7. */
6566FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq__pmovmskb_Gd_Udq)
6567{
6568 /* Docs says register only. */
6569 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6570 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
6571 return IEMOP_RAISE_INVALID_OPCODE();
6572
6573 /* Note! Taking the lazy approch here wrt the high 32-bits of the GREG. */
6574 /** @todo testcase: Check that the instruction implicitly clears the high
6575 * bits in 64-bit mode. The REX.W is first necessary when VLMAX > 256
6576 * and opcode modifications are made to work with the whole width (not
6577 * just 128). */
6578 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
6579 {
6580 case IEM_OP_PRF_SIZE_OP: /* SSE */
6581 IEMOP_MNEMONIC("pmovmskb Gd,Nq");
6582 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
6583 IEM_MC_BEGIN(2, 0);
6584 IEM_MC_ARG(uint64_t *, pDst, 0);
6585 IEM_MC_ARG(uint128_t const *, pSrc, 1);
6586 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6587 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
6588 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
6589 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
6590 IEM_MC_ADVANCE_RIP();
6591 IEM_MC_END();
6592 return VINF_SUCCESS;
6593
6594 case 0: /* MMX */
6595 IEMOP_MNEMONIC("pmovmskb Gd,Udq");
6596 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
6597 IEM_MC_BEGIN(2, 0);
6598 IEM_MC_ARG(uint64_t *, pDst, 0);
6599 IEM_MC_ARG(uint64_t const *, pSrc, 1);
6600 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
6601 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
6602 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
6603 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
6604 IEM_MC_ADVANCE_RIP();
6605 IEM_MC_END();
6606 return VINF_SUCCESS;
6607
6608 default:
6609 return IEMOP_RAISE_INVALID_OPCODE();
6610 }
6611}
6612
6613
/* Not yet implemented MMX/SSE opcodes 0x0f 0xd8..0xee. */

/** Opcode 0x0f 0xd8. */
FNIEMOP_STUB(iemOp_psubusb_Pq_Qq__psubusb_Vdq_Wdq);
/** Opcode 0x0f 0xd9. */
FNIEMOP_STUB(iemOp_psubusw_Pq_Qq__psubusw_Vdq_Wdq);
/** Opcode 0x0f 0xda. */
FNIEMOP_STUB(iemOp_pminub_Pq_Qq__pminub_Vdq_Wdq);
/** Opcode 0x0f 0xdb. */
FNIEMOP_STUB(iemOp_pand_Pq_Qq__pand_Vdq_Wdq);
/** Opcode 0x0f 0xdc. */
FNIEMOP_STUB(iemOp_paddusb_Pq_Qq__paddusb_Vdq_Wdq);
/** Opcode 0x0f 0xdd. */
FNIEMOP_STUB(iemOp_paddusw_Pq_Qq__paddusw_Vdq_Wdq);
/** Opcode 0x0f 0xde. */
FNIEMOP_STUB(iemOp_pmaxub_Pq_Qq__pamxub_Vdq_Wdq);
/** Opcode 0x0f 0xdf. */
FNIEMOP_STUB(iemOp_pandn_Pq_Qq__pandn_Vdq_Wdq);
/** Opcode 0x0f 0xe0. */
FNIEMOP_STUB(iemOp_pavgb_Pq_Qq__pavgb_Vdq_Wdq);
/** Opcode 0x0f 0xe1. */
FNIEMOP_STUB(iemOp_psraw_Pq_Qq__psraw_Vdq_Wdq);
/** Opcode 0x0f 0xe2. */
FNIEMOP_STUB(iemOp_psrad_Pq_Qq__psrad_Vdq_Wdq);
/** Opcode 0x0f 0xe3. */
FNIEMOP_STUB(iemOp_pavgw_Pq_Qq__pavgw_Vdq_Wdq);
/** Opcode 0x0f 0xe4. */
FNIEMOP_STUB(iemOp_pmulhuw_Pq_Qq__pmulhuw_Vdq_Wdq);
/** Opcode 0x0f 0xe5. */
FNIEMOP_STUB(iemOp_pmulhw_Pq_Qq__pmulhw_Vdq_Wdq);
/** Opcode 0x0f 0xe6. */
FNIEMOP_STUB(iemOp_cvttpd2dq_Vdq_Wdp__cvtdq2pd_Vdq_Wpd__cvtpd2dq_Vdq_Wpd);
/** Opcode 0x0f 0xe7. */
FNIEMOP_STUB(iemOp_movntq_Mq_Pq__movntdq_Mdq_Vdq);
/** Opcode 0x0f 0xe8. */
FNIEMOP_STUB(iemOp_psubsb_Pq_Qq__psubsb_Vdq_Wdq);
/** Opcode 0x0f 0xe9. */
FNIEMOP_STUB(iemOp_psubsw_Pq_Qq__psubsw_Vdq_Wdq);
/** Opcode 0x0f 0xea. */
FNIEMOP_STUB(iemOp_pminsw_Pq_Qq__pminsw_Vdq_Wdq);
/** Opcode 0x0f 0xeb. */
FNIEMOP_STUB(iemOp_por_Pq_Qq__por_Vdq_Wdq);
/** Opcode 0x0f 0xec. */
FNIEMOP_STUB(iemOp_paddsb_Pq_Qq__paddsb_Vdq_Wdq);
/** Opcode 0x0f 0xed. */
FNIEMOP_STUB(iemOp_paddsw_Pq_Qq__paddsw_Vdq_Wdq);
/** Opcode 0x0f 0xee. */
FNIEMOP_STUB(iemOp_pmaxsw_Pq_Qq__pmaxsw_Vdq_Wdq);
6660
6661
/** Opcode 0x0f 0xef. */
FNIEMOP_DEF(iemOp_pxor_Pq_Qq__pxor_Vdq_Wdq)
{
    /* PXOR: handled by the common MMX/SSE2 full,full->full decoder with the
       pxor worker table. */
    IEMOP_MNEMONIC("pxor");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pxor);
}
6668
6669
/* Not yet implemented MMX/SSE opcodes 0x0f 0xf0..0xfe. */

/** Opcode 0x0f 0xf0. */
FNIEMOP_STUB(iemOp_lddqu_Vdq_Mdq);
/** Opcode 0x0f 0xf1. */
FNIEMOP_STUB(iemOp_psllw_Pq_Qq__pslw_Vdq_Wdq);
/** Opcode 0x0f 0xf2. */
FNIEMOP_STUB(iemOp_psld_Pq_Qq__pslld_Vdq_Wdq);
/** Opcode 0x0f 0xf3. */
FNIEMOP_STUB(iemOp_psllq_Pq_Qq__pslq_Vdq_Wdq);
/** Opcode 0x0f 0xf4. */
FNIEMOP_STUB(iemOp_pmuludq_Pq_Qq__pmuludq_Vdq_Wdq);
/** Opcode 0x0f 0xf5. */
FNIEMOP_STUB(iemOp_pmaddwd_Pq_Qq__pmaddwd_Vdq_Wdq);
/** Opcode 0x0f 0xf6. */
FNIEMOP_STUB(iemOp_psadbw_Pq_Qq__psadbw_Vdq_Wdq);
/** Opcode 0x0f 0xf7. */
FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq__maskmovdqu_Vdq_Udq);
/** Opcode 0x0f 0xf8. */
FNIEMOP_STUB(iemOp_psubb_Pq_Qq_psubb_Vdq_Wdq); //NEXT
/** Opcode 0x0f 0xf9. */
FNIEMOP_STUB(iemOp_psubw_Pq_Qq__psubw_Vdq_Wdq);
/** Opcode 0x0f 0xfa. */
FNIEMOP_STUB(iemOp_psubd_Pq_Qq__psubd_Vdq_Wdq);
/** Opcode 0x0f 0xfb. */
FNIEMOP_STUB(iemOp_psubq_Pq_Qq__psbuq_Vdq_Wdq);
/** Opcode 0x0f 0xfc. */
FNIEMOP_STUB(iemOp_paddb_Pq_Qq__paddb_Vdq_Wdq);
/** Opcode 0x0f 0xfd. */
FNIEMOP_STUB(iemOp_paddw_Pq_Qq__paddw_Vdq_Wdq);
/** Opcode 0x0f 0xfe. */
FNIEMOP_STUB(iemOp_paddd_Pq_Qq__paddd_Vdq_Wdq);
6700
6701
6702const PFNIEMOP g_apfnTwoByteMap[256] =
6703{
6704 /* 0x00 */ iemOp_Grp6,
6705 /* 0x01 */ iemOp_Grp7,
6706 /* 0x02 */ iemOp_lar_Gv_Ew,
6707 /* 0x03 */ iemOp_lsl_Gv_Ew,
6708 /* 0x04 */ iemOp_Invalid,
6709 /* 0x05 */ iemOp_syscall,
6710 /* 0x06 */ iemOp_clts,
6711 /* 0x07 */ iemOp_sysret,
6712 /* 0x08 */ iemOp_invd,
6713 /* 0x09 */ iemOp_wbinvd,
6714 /* 0x0a */ iemOp_Invalid,
6715 /* 0x0b */ iemOp_ud2,
6716 /* 0x0c */ iemOp_Invalid,
6717 /* 0x0d */ iemOp_nop_Ev_GrpP,
6718 /* 0x0e */ iemOp_femms,
6719 /* 0x0f */ iemOp_3Dnow,
6720 /* 0x10 */ iemOp_movups_Vps_Wps__movupd_Vpd_Wpd__movss_Vss_Wss__movsd_Vsd_Wsd,
6721 /* 0x11 */ iemOp_movups_Wps_Vps__movupd_Wpd_Vpd__movss_Wss_Vss__movsd_Vsd_Wsd,
6722 /* 0x12 */ iemOp_movlps_Vq_Mq__movhlps_Vq_Uq__movlpd_Vq_Mq__movsldup_Vq_Wq__movddup_Vq_Wq,
6723 /* 0x13 */ iemOp_movlps_Mq_Vq__movlpd_Mq_Vq,
6724 /* 0x14 */ iemOp_unpckhlps_Vps_Wq__unpcklpd_Vpd_Wq,
6725 /* 0x15 */ iemOp_unpckhps_Vps_Wq__unpckhpd_Vpd_Wq,
6726 /* 0x16 */ iemOp_movhps_Vq_Mq__movlhps_Vq_Uq__movhpd_Vq_Mq__movshdup_Vq_Wq,
6727 /* 0x17 */ iemOp_movhps_Mq_Vq__movhpd_Mq_Vq,
6728 /* 0x18 */ iemOp_prefetch_Grp16,
6729 /* 0x19 */ iemOp_nop_Ev,
6730 /* 0x1a */ iemOp_nop_Ev,
6731 /* 0x1b */ iemOp_nop_Ev,
6732 /* 0x1c */ iemOp_nop_Ev,
6733 /* 0x1d */ iemOp_nop_Ev,
6734 /* 0x1e */ iemOp_nop_Ev,
6735 /* 0x1f */ iemOp_nop_Ev,
6736 /* 0x20 */ iemOp_mov_Rd_Cd,
6737 /* 0x21 */ iemOp_mov_Rd_Dd,
6738 /* 0x22 */ iemOp_mov_Cd_Rd,
6739 /* 0x23 */ iemOp_mov_Dd_Rd,
6740 /* 0x24 */ iemOp_mov_Rd_Td,
6741 /* 0x25 */ iemOp_Invalid,
6742 /* 0x26 */ iemOp_mov_Td_Rd,
6743 /* 0x27 */ iemOp_Invalid,
6744 /* 0x28 */ iemOp_movaps_Vps_Wps__movapd_Vpd_Wpd,
6745 /* 0x29 */ iemOp_movaps_Wps_Vps__movapd_Wpd_Vpd,
6746 /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi__cvtpi2pd_Vpd_Qpi__cvtsi2ss_Vss_Ey__cvtsi2sd_Vsd_Ey,
6747 /* 0x2b */ iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd,
6748 /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps__cvttpd2pi_Ppi_Wpd__cvttss2si_Gy_Wss__cvttsd2si_Yu_Wsd,
6749 /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps__cvtpd2pi_QpiWpd__cvtss2si_Gy_Wss__cvtsd2si_Gy_Wsd,
6750 /* 0x2e */ iemOp_ucomiss_Vss_Wss__ucomisd_Vsd_Wsd,
6751 /* 0x2f */ iemOp_comiss_Vss_Wss__comisd_Vsd_Wsd,
6752 /* 0x30 */ iemOp_wrmsr,
6753 /* 0x31 */ iemOp_rdtsc,
6754 /* 0x32 */ iemOp_rdmsr,
6755 /* 0x33 */ iemOp_rdpmc,
6756 /* 0x34 */ iemOp_sysenter,
6757 /* 0x35 */ iemOp_sysexit,
6758 /* 0x36 */ iemOp_Invalid,
6759 /* 0x37 */ iemOp_getsec,
6760 /* 0x38 */ iemOp_3byte_Esc_A4,
6761 /* 0x39 */ iemOp_Invalid,
6762 /* 0x3a */ iemOp_3byte_Esc_A5,
6763 /* 0x3b */ iemOp_Invalid,
6764 /* 0x3c */ iemOp_movnti_Gv_Ev/*??*/,
6765 /* 0x3d */ iemOp_Invalid,
6766 /* 0x3e */ iemOp_Invalid,
6767 /* 0x3f */ iemOp_Invalid,
6768 /* 0x40 */ iemOp_cmovo_Gv_Ev,
6769 /* 0x41 */ iemOp_cmovno_Gv_Ev,
6770 /* 0x42 */ iemOp_cmovc_Gv_Ev,
6771 /* 0x43 */ iemOp_cmovnc_Gv_Ev,
6772 /* 0x44 */ iemOp_cmove_Gv_Ev,
6773 /* 0x45 */ iemOp_cmovne_Gv_Ev,
6774 /* 0x46 */ iemOp_cmovbe_Gv_Ev,
6775 /* 0x47 */ iemOp_cmovnbe_Gv_Ev,
6776 /* 0x48 */ iemOp_cmovs_Gv_Ev,
6777 /* 0x49 */ iemOp_cmovns_Gv_Ev,
6778 /* 0x4a */ iemOp_cmovp_Gv_Ev,
6779 /* 0x4b */ iemOp_cmovnp_Gv_Ev,
6780 /* 0x4c */ iemOp_cmovl_Gv_Ev,
6781 /* 0x4d */ iemOp_cmovnl_Gv_Ev,
6782 /* 0x4e */ iemOp_cmovle_Gv_Ev,
6783 /* 0x4f */ iemOp_cmovnle_Gv_Ev,
6784 /* 0x50 */ iemOp_movmskps_Gy_Ups__movmskpd_Gy_Upd,
6785 /* 0x51 */ iemOp_sqrtps_Wps_Vps__sqrtpd_Wpd_Vpd__sqrtss_Vss_Wss__sqrtsd_Vsd_Wsd,
6786 /* 0x52 */ iemOp_rsqrtps_Wps_Vps__rsqrtss_Vss_Wss,
6787 /* 0x53 */ iemOp_rcpps_Wps_Vps__rcpss_Vs_Wss,
6788 /* 0x54 */ iemOp_andps_Vps_Wps__andpd_Wpd_Vpd,
6789 /* 0x55 */ iemOp_andnps_Vps_Wps__andnpd_Wpd_Vpd,
6790 /* 0x56 */ iemOp_orps_Wpd_Vpd__orpd_Wpd_Vpd,
6791 /* 0x57 */ iemOp_xorps_Vps_Wps__xorpd_Wpd_Vpd,
6792 /* 0x58 */ iemOp_addps_Vps_Wps__addpd_Vpd_Wpd__addss_Vss_Wss__addsd_Vsd_Wsd,
6793 /* 0x59 */ iemOp_mulps_Vps_Wps__mulpd_Vpd_Wpd__mulss_Vss__Wss__mulsd_Vsd_Wsd,
6794 /* 0x5a */ iemOp_cvtps2pd_Vpd_Wps__cvtpd2ps_Vps_Wpd__cvtss2sd_Vsd_Wss__cvtsd2ss_Vss_Wsd,
6795 /* 0x5b */ iemOp_cvtdq2ps_Vps_Wdq__cvtps2dq_Vdq_Wps__cvtps2dq_Vdq_Wps,
6796 /* 0x5c */ iemOp_subps_Vps_Wps__subpd_Vps_Wdp__subss_Vss_Wss__subsd_Vsd_Wsd,
6797 /* 0x5d */ iemOp_minps_Vps_Wps__minpd_Vpd_Wpd__minss_Vss_Wss__minsd_Vsd_Wsd,
6798 /* 0x5e */ iemOp_divps_Vps_Wps__divpd_Vpd_Wpd__divss_Vss_Wss__divsd_Vsd_Wsd,
6799 /* 0x5f */ iemOp_maxps_Vps_Wps__maxpd_Vpd_Wpd__maxss_Vss_Wss__maxsd_Vsd_Wsd,
6800 /* 0x60 */ iemOp_punpcklbw_Pq_Qd__punpcklbw_Vdq_Wdq,
6801 /* 0x61 */ iemOp_punpcklwd_Pq_Qd__punpcklwd_Vdq_Wdq,
6802 /* 0x62 */ iemOp_punpckldq_Pq_Qd__punpckldq_Vdq_Wdq,
6803 /* 0x63 */ iemOp_packsswb_Pq_Qq__packsswb_Vdq_Wdq,
6804 /* 0x64 */ iemOp_pcmpgtb_Pq_Qq__pcmpgtb_Vdq_Wdq,
6805 /* 0x65 */ iemOp_pcmpgtw_Pq_Qq__pcmpgtw_Vdq_Wdq,
6806 /* 0x66 */ iemOp_pcmpgtd_Pq_Qq__pcmpgtd_Vdq_Wdq,
6807 /* 0x67 */ iemOp_packuswb_Pq_Qq__packuswb_Vdq_Wdq,
6808 /* 0x68 */ iemOp_punpckhbw_Pq_Qq__punpckhbw_Vdq_Wdq,
6809 /* 0x69 */ iemOp_punpckhwd_Pq_Qd__punpckhwd_Vdq_Wdq,
6810 /* 0x6a */ iemOp_punpckhdq_Pq_Qd__punpckhdq_Vdq_Wdq,
6811 /* 0x6b */ iemOp_packssdw_Pq_Qd__packssdq_Vdq_Wdq,
6812 /* 0x6c */ iemOp_punpcklqdq_Vdq_Wdq,
6813 /* 0x6d */ iemOp_punpckhqdq_Vdq_Wdq,
6814 /* 0x6e */ iemOp_movd_q_Pd_Ey__movd_q_Vy_Ey,
6815 /* 0x6f */ iemOp_movq_Pq_Qq__movdqa_Vdq_Wdq__movdqu_Vdq_Wdq,
6816 /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib__pshufd_Vdq_Wdq_Ib__pshufhw_Vdq_Wdq_Ib__pshuflq_Vdq_Wdq_Ib,
6817 /* 0x71 */ iemOp_Grp12,
6818 /* 0x72 */ iemOp_Grp13,
6819 /* 0x73 */ iemOp_Grp14,
6820 /* 0x74 */ iemOp_pcmpeqb_Pq_Qq__pcmpeqb_Vdq_Wdq,
6821 /* 0x75 */ iemOp_pcmpeqw_Pq_Qq__pcmpeqw_Vdq_Wdq,
6822 /* 0x76 */ iemOp_pcmped_Pq_Qq__pcmpeqd_Vdq_Wdq,
6823 /* 0x77 */ iemOp_emms,
6824 /* 0x78 */ iemOp_vmread_AmdGrp17,
6825 /* 0x79 */ iemOp_vmwrite,
6826 /* 0x7a */ iemOp_Invalid,
6827 /* 0x7b */ iemOp_Invalid,
6828 /* 0x7c */ iemOp_haddpd_Vdp_Wpd__haddps_Vps_Wps,
6829 /* 0x7d */ iemOp_hsubpd_Vpd_Wpd__hsubps_Vps_Wps,
6830 /* 0x7e */ iemOp_movd_q_Ey_Pd__movd_q_Ey_Vy__movq_Vq_Wq,
6831 /* 0x7f */ iemOp_movq_Qq_Pq__movq_movdqa_Wdq_Vdq__movdqu_Wdq_Vdq,
6832 /* 0x80 */ iemOp_jo_Jv,
6833 /* 0x81 */ iemOp_jno_Jv,
6834 /* 0x82 */ iemOp_jc_Jv,
6835 /* 0x83 */ iemOp_jnc_Jv,
6836 /* 0x84 */ iemOp_je_Jv,
6837 /* 0x85 */ iemOp_jne_Jv,
6838 /* 0x86 */ iemOp_jbe_Jv,
6839 /* 0x87 */ iemOp_jnbe_Jv,
6840 /* 0x88 */ iemOp_js_Jv,
6841 /* 0x89 */ iemOp_jns_Jv,
6842 /* 0x8a */ iemOp_jp_Jv,
6843 /* 0x8b */ iemOp_jnp_Jv,
6844 /* 0x8c */ iemOp_jl_Jv,
6845 /* 0x8d */ iemOp_jnl_Jv,
6846 /* 0x8e */ iemOp_jle_Jv,
6847 /* 0x8f */ iemOp_jnle_Jv,
6848 /* 0x90 */ iemOp_seto_Eb,
6849 /* 0x91 */ iemOp_setno_Eb,
6850 /* 0x92 */ iemOp_setc_Eb,
6851 /* 0x93 */ iemOp_setnc_Eb,
6852 /* 0x94 */ iemOp_sete_Eb,
6853 /* 0x95 */ iemOp_setne_Eb,
6854 /* 0x96 */ iemOp_setbe_Eb,
6855 /* 0x97 */ iemOp_setnbe_Eb,
6856 /* 0x98 */ iemOp_sets_Eb,
6857 /* 0x99 */ iemOp_setns_Eb,
6858 /* 0x9a */ iemOp_setp_Eb,
6859 /* 0x9b */ iemOp_setnp_Eb,
6860 /* 0x9c */ iemOp_setl_Eb,
6861 /* 0x9d */ iemOp_setnl_Eb,
6862 /* 0x9e */ iemOp_setle_Eb,
6863 /* 0x9f */ iemOp_setnle_Eb,
6864 /* 0xa0 */ iemOp_push_fs,
6865 /* 0xa1 */ iemOp_pop_fs,
6866 /* 0xa2 */ iemOp_cpuid,
6867 /* 0xa3 */ iemOp_bt_Ev_Gv,
6868 /* 0xa4 */ iemOp_shld_Ev_Gv_Ib,
6869 /* 0xa5 */ iemOp_shld_Ev_Gv_CL,
6870 /* 0xa6 */ iemOp_Invalid,
6871 /* 0xa7 */ iemOp_Invalid,
6872 /* 0xa8 */ iemOp_push_gs,
6873 /* 0xa9 */ iemOp_pop_gs,
6874 /* 0xaa */ iemOp_rsm,
6875 /* 0xab */ iemOp_bts_Ev_Gv,
6876 /* 0xac */ iemOp_shrd_Ev_Gv_Ib,
6877 /* 0xad */ iemOp_shrd_Ev_Gv_CL,
6878 /* 0xae */ iemOp_Grp15,
6879 /* 0xaf */ iemOp_imul_Gv_Ev,
6880 /* 0xb0 */ iemOp_cmpxchg_Eb_Gb,
6881 /* 0xb1 */ iemOp_cmpxchg_Ev_Gv,
6882 /* 0xb2 */ iemOp_lss_Gv_Mp,
6883 /* 0xb3 */ iemOp_btr_Ev_Gv,
6884 /* 0xb4 */ iemOp_lfs_Gv_Mp,
6885 /* 0xb5 */ iemOp_lgs_Gv_Mp,
6886 /* 0xb6 */ iemOp_movzx_Gv_Eb,
6887 /* 0xb7 */ iemOp_movzx_Gv_Ew,
6888 /* 0xb8 */ iemOp_popcnt_Gv_Ev_jmpe,
6889 /* 0xb9 */ iemOp_Grp10,
6890 /* 0xba */ iemOp_Grp8,
6891 /* 0xbb */ iemOp_btc_Ev_Gv,
6892 /* 0xbc */ iemOp_bsf_Gv_Ev,
6893 /* 0xbd */ iemOp_bsr_Gv_Ev,
6894 /* 0xbe */ iemOp_movsx_Gv_Eb,
6895 /* 0xbf */ iemOp_movsx_Gv_Ew,
6896 /* 0xc0 */ iemOp_xadd_Eb_Gb,
6897 /* 0xc1 */ iemOp_xadd_Ev_Gv,
6898 /* 0xc2 */ iemOp_cmpps_Vps_Wps_Ib__cmppd_Vpd_Wpd_Ib__cmpss_Vss_Wss_Ib__cmpsd_Vsd_Wsd_Ib,
6899 /* 0xc3 */ iemOp_movnti_My_Gy,
6900 /* 0xc4 */ iemOp_pinsrw_Pq_Ry_Mw_Ib__pinsrw_Vdq_Ry_Mw_Ib,
6901 /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib__pextrw_Gd_Udq_Ib,
6902 /* 0xc6 */ iemOp_shufps_Vps_Wps_Ib__shufdp_Vpd_Wpd_Ib,
6903 /* 0xc7 */ iemOp_Grp9,
6904 /* 0xc8 */ iemOp_bswap_rAX_r8,
6905 /* 0xc9 */ iemOp_bswap_rCX_r9,
6906 /* 0xca */ iemOp_bswap_rDX_r10,
6907 /* 0xcb */ iemOp_bswap_rBX_r11,
6908 /* 0xcc */ iemOp_bswap_rSP_r12,
6909 /* 0xcd */ iemOp_bswap_rBP_r13,
6910 /* 0xce */ iemOp_bswap_rSI_r14,
6911 /* 0xcf */ iemOp_bswap_rDI_r15,
6912 /* 0xd0 */ iemOp_addsubpd_Vpd_Wpd__addsubps_Vps_Wps,
6913 /* 0xd1 */ iemOp_psrlw_Pp_Qp__psrlw_Vdp_Wdq,
6914 /* 0xd2 */ iemOp_psrld_Pq_Qq__psrld_Vdq_Wdq,
6915 /* 0xd3 */ iemOp_psrlq_Pq_Qq__psrlq_Vdq_Wdq,
6916 /* 0xd4 */ iemOp_paddq_Pq_Qq__paddq_Vdq_Wdq,
6917 /* 0xd5 */ iemOp_pmulq_Pq_Qq__pmullw_Vdq_Wdq,
6918 /* 0xd6 */ iemOp_movq_Wq_Vq__movq2dq_Vdq_Nq__movdq2q_Pq_Uq,
6919 /* 0xd7 */ iemOp_pmovmskb_Gd_Nq__pmovmskb_Gd_Udq,
6920 /* 0xd8 */ iemOp_psubusb_Pq_Qq__psubusb_Vdq_Wdq,
6921 /* 0xd9 */ iemOp_psubusw_Pq_Qq__psubusw_Vdq_Wdq,
6922 /* 0xda */ iemOp_pminub_Pq_Qq__pminub_Vdq_Wdq,
6923 /* 0xdb */ iemOp_pand_Pq_Qq__pand_Vdq_Wdq,
6924 /* 0xdc */ iemOp_paddusb_Pq_Qq__paddusb_Vdq_Wdq,
6925 /* 0xdd */ iemOp_paddusw_Pq_Qq__paddusw_Vdq_Wdq,
6926 /* 0xde */ iemOp_pmaxub_Pq_Qq__pamxub_Vdq_Wdq,
6927 /* 0xdf */ iemOp_pandn_Pq_Qq__pandn_Vdq_Wdq,
6928 /* 0xe0 */ iemOp_pavgb_Pq_Qq__pavgb_Vdq_Wdq,
6929 /* 0xe1 */ iemOp_psraw_Pq_Qq__psraw_Vdq_Wdq,
6930 /* 0xe2 */ iemOp_psrad_Pq_Qq__psrad_Vdq_Wdq,
6931 /* 0xe3 */ iemOp_pavgw_Pq_Qq__pavgw_Vdq_Wdq,
6932 /* 0xe4 */ iemOp_pmulhuw_Pq_Qq__pmulhuw_Vdq_Wdq,
6933 /* 0xe5 */ iemOp_pmulhw_Pq_Qq__pmulhw_Vdq_Wdq,
6934 /* 0xe6 */ iemOp_cvttpd2dq_Vdq_Wdp__cvtdq2pd_Vdq_Wpd__cvtpd2dq_Vdq_Wpd,
6935 /* 0xe7 */ iemOp_movntq_Mq_Pq__movntdq_Mdq_Vdq,
6936 /* 0xe8 */ iemOp_psubsb_Pq_Qq__psubsb_Vdq_Wdq,
6937 /* 0xe9 */ iemOp_psubsw_Pq_Qq__psubsw_Vdq_Wdq,
6938 /* 0xea */ iemOp_pminsw_Pq_Qq__pminsw_Vdq_Wdq,
6939 /* 0xeb */ iemOp_por_Pq_Qq__por_Vdq_Wdq,
6940 /* 0xec */ iemOp_paddsb_Pq_Qq__paddsb_Vdq_Wdq,
6941 /* 0xed */ iemOp_paddsw_Pq_Qq__paddsw_Vdq_Wdq,
6942 /* 0xee */ iemOp_pmaxsw_Pq_Qq__pmaxsw_Vdq_Wdq,
6943 /* 0xef */ iemOp_pxor_Pq_Qq__pxor_Vdq_Wdq,
6944 /* 0xf0 */ iemOp_lddqu_Vdq_Mdq,
6945 /* 0xf1 */ iemOp_psllw_Pq_Qq__pslw_Vdq_Wdq,
6946 /* 0xf2 */ iemOp_psld_Pq_Qq__pslld_Vdq_Wdq,
6947 /* 0xf3 */ iemOp_psllq_Pq_Qq__pslq_Vdq_Wdq,
6948 /* 0xf4 */ iemOp_pmuludq_Pq_Qq__pmuludq_Vdq_Wdq,
6949 /* 0xf5 */ iemOp_pmaddwd_Pq_Qq__pmaddwd_Vdq_Wdq,
6950 /* 0xf6 */ iemOp_psadbw_Pq_Qq__psadbw_Vdq_Wdq,
6951 /* 0xf7 */ iemOp_maskmovq_Pq_Nq__maskmovdqu_Vdq_Udq,
6952 /* 0xf8 */ iemOp_psubb_Pq_Qq_psubb_Vdq_Wdq,
6953 /* 0xf9 */ iemOp_psubw_Pq_Qq__psubw_Vdq_Wdq,
6954 /* 0xfa */ iemOp_psubd_Pq_Qq__psubd_Vdq_Wdq,
6955 /* 0xfb */ iemOp_psubq_Pq_Qq__psbuq_Vdq_Wdq,
6956 /* 0xfc */ iemOp_paddb_Pq_Qq__paddb_Vdq_Wdq,
6957 /* 0xfd */ iemOp_paddw_Pq_Qq__paddw_Vdq_Wdq,
6958 /* 0xfe */ iemOp_paddd_Pq_Qq__paddd_Vdq_Wdq,
6959 /* 0xff */ iemOp_Invalid
6960};
6961
6962/** @} */
6963
6964
6965/** @name One byte opcodes.
6966 *
6967 * @{
6968 */
6969
/*
 * ADD - integer addition, opcodes 0x00..0x05.  All six encodings defer to the
 * shared binary-operator decode helpers with the 'add' worker table.
 */

/** Opcode 0x00. */
FNIEMOP_DEF(iemOp_add_Eb_Gb)
{
    IEMOP_MNEMONIC("add Eb,Gb");
    /* ADD r/m8, r8 - byte destination in register or memory. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_add);
}


/** Opcode 0x01. */
FNIEMOP_DEF(iemOp_add_Ev_Gv)
{
    IEMOP_MNEMONIC("add Ev,Gv");
    /* ADD r/m16/32/64, rv - operand-size dependent destination. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_add);
}


/** Opcode 0x02. */
FNIEMOP_DEF(iemOp_add_Gb_Eb)
{
    IEMOP_MNEMONIC("add Gb,Eb");
    /* ADD r8, r/m8 - register destination variant. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_add);
}


/** Opcode 0x03. */
FNIEMOP_DEF(iemOp_add_Gv_Ev)
{
    IEMOP_MNEMONIC("add Gv,Ev");
    /* ADD rv, r/m16/32/64. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_add);
}


/** Opcode 0x04. */
FNIEMOP_DEF(iemOp_add_Al_Ib)
{
    IEMOP_MNEMONIC("add al,Ib");
    /* ADD AL, imm8. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_add);
}


/** Opcode 0x05. */
FNIEMOP_DEF(iemOp_add_eAX_Iz)
{
    IEMOP_MNEMONIC("add rAX,Iz");
    /* ADD rAX, immediate (size picked by the helper from the effective operand size). */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_add);
}
7016
7017
/** Opcode 0x06. */
FNIEMOP_DEF(iemOp_push_ES)
{
    IEMOP_MNEMONIC("push es");
    /* Defers to the common segment-register push worker. */
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_ES);
}


/** Opcode 0x07. */
FNIEMOP_DEF(iemOp_pop_ES)
{
    IEMOP_MNEMONIC("pop es");
    IEMOP_HLP_NO_64BIT();       /* Not available in 64-bit mode. */
    IEMOP_HLP_NO_LOCK_PREFIX(); /* LOCK prefix is not valid here. */
    /* Segment loading has side effects (descriptor checks), so defer to the C implementation. */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_ES, pIemCpu->enmEffOpSize);
}
7034
7035
/** Opcode 0x08. */
FNIEMOP_DEF(iemOp_or_Eb_Gb)
{
    IEMOP_MNEMONIC("or Eb,Gb");
    /* AF is undefined after OR; tell the verifier not to compare it. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_or);
}
7043
7044
7045/** Opcode 0x09. */
7046FNIEMOP_DEF(iemOp_or_Ev_Gv)
7047{
7048 IEMOP_MNEMONIC("or Ev,Gv ");
7049 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7050 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_or);
7051}
7052
7053
/** Opcode 0x0a. */
FNIEMOP_DEF(iemOp_or_Gb_Eb)
{
    IEMOP_MNEMONIC("or Gb,Eb");
    /* AF is undefined after OR (same for all the OR forms below). */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_or);
}


/** Opcode 0x0b. */
FNIEMOP_DEF(iemOp_or_Gv_Ev)
{
    IEMOP_MNEMONIC("or Gv,Ev");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_or);
}


/** Opcode 0x0c. */
FNIEMOP_DEF(iemOp_or_Al_Ib)
{
    IEMOP_MNEMONIC("or al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_or);
}


/** Opcode 0x0d. */
FNIEMOP_DEF(iemOp_or_eAX_Iz)
{
    IEMOP_MNEMONIC("or rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_or);
}
7088
7089
/** Opcode 0x0e. */
FNIEMOP_DEF(iemOp_push_CS)
{
    IEMOP_MNEMONIC("push cs");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_CS);
}


/** Opcode 0x0f. */
FNIEMOP_DEF(iemOp_2byteEscape)
{
    /* Fetch the second opcode byte and dispatch into the two-byte opcode map. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    /** @todo PUSH CS on 8086, undefined on 80186. */
    IEMOP_HLP_MIN_286(); /* 0x0f escapes require at least a 286 target CPU. */
    return FNIEMOP_CALL(g_apfnTwoByteMap[b]);
}
7106
/*
 * ADC - add with carry (CF), opcodes 0x10..0x15.  Same decode-helper pattern
 * as ADD, just with the 'adc' worker table.
 */

/** Opcode 0x10. */
FNIEMOP_DEF(iemOp_adc_Eb_Gb)
{
    IEMOP_MNEMONIC("adc Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_adc);
}


/** Opcode 0x11. */
FNIEMOP_DEF(iemOp_adc_Ev_Gv)
{
    IEMOP_MNEMONIC("adc Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_adc);
}


/** Opcode 0x12. */
FNIEMOP_DEF(iemOp_adc_Gb_Eb)
{
    IEMOP_MNEMONIC("adc Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_adc);
}


/** Opcode 0x13. */
FNIEMOP_DEF(iemOp_adc_Gv_Ev)
{
    IEMOP_MNEMONIC("adc Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_adc);
}


/** Opcode 0x14. */
FNIEMOP_DEF(iemOp_adc_Al_Ib)
{
    IEMOP_MNEMONIC("adc al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_adc);
}


/** Opcode 0x15. */
FNIEMOP_DEF(iemOp_adc_eAX_Iz)
{
    IEMOP_MNEMONIC("adc rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_adc);
}
7153
7154
/** Opcode 0x16. */
FNIEMOP_DEF(iemOp_push_SS)
{
    IEMOP_MNEMONIC("push ss");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_SS);
}


/** Opcode 0x17. */
FNIEMOP_DEF(iemOp_pop_SS)
{
    IEMOP_MNEMONIC("pop ss"); /** @todo implies instruction fusing? */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();     /* Not available in 64-bit mode. */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_SS, pIemCpu->enmEffOpSize);
}
7171
7172
/*
 * SBB - subtract with borrow (CF), opcodes 0x18..0x1d, followed by
 * PUSH/POP DS (0x1e/0x1f).
 */

/** Opcode 0x18. */
FNIEMOP_DEF(iemOp_sbb_Eb_Gb)
{
    IEMOP_MNEMONIC("sbb Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sbb);
}


/** Opcode 0x19. */
FNIEMOP_DEF(iemOp_sbb_Ev_Gv)
{
    IEMOP_MNEMONIC("sbb Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sbb);
}


/** Opcode 0x1a. */
FNIEMOP_DEF(iemOp_sbb_Gb_Eb)
{
    IEMOP_MNEMONIC("sbb Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sbb);
}


/** Opcode 0x1b. */
FNIEMOP_DEF(iemOp_sbb_Gv_Ev)
{
    IEMOP_MNEMONIC("sbb Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sbb);
}


/** Opcode 0x1c. */
FNIEMOP_DEF(iemOp_sbb_Al_Ib)
{
    IEMOP_MNEMONIC("sbb al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sbb);
}


/** Opcode 0x1d. */
FNIEMOP_DEF(iemOp_sbb_eAX_Iz)
{
    IEMOP_MNEMONIC("sbb rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sbb);
}


/** Opcode 0x1e. */
FNIEMOP_DEF(iemOp_push_DS)
{
    IEMOP_MNEMONIC("push ds");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_DS);
}


/** Opcode 0x1f. */
FNIEMOP_DEF(iemOp_pop_DS)
{
    IEMOP_MNEMONIC("pop ds");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();     /* Not available in 64-bit mode. */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_DS, pIemCpu->enmEffOpSize);
}
7237
7238
/*
 * AND - bitwise and, opcodes 0x20..0x25.  AF is undefined after AND, hence
 * the verification exemption on every form.
 */

/** Opcode 0x20. */
FNIEMOP_DEF(iemOp_and_Eb_Gb)
{
    IEMOP_MNEMONIC("and Eb,Gb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_and);
}


/** Opcode 0x21. */
FNIEMOP_DEF(iemOp_and_Ev_Gv)
{
    IEMOP_MNEMONIC("and Ev,Gv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_and);
}


/** Opcode 0x22. */
FNIEMOP_DEF(iemOp_and_Gb_Eb)
{
    IEMOP_MNEMONIC("and Gb,Eb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_and);
}


/** Opcode 0x23. */
FNIEMOP_DEF(iemOp_and_Gv_Ev)
{
    IEMOP_MNEMONIC("and Gv,Ev");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_and);
}


/** Opcode 0x24. */
FNIEMOP_DEF(iemOp_and_Al_Ib)
{
    IEMOP_MNEMONIC("and al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_and);
}


/** Opcode 0x25. */
FNIEMOP_DEF(iemOp_and_eAX_Iz)
{
    IEMOP_MNEMONIC("and rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_and);
}
7291
7292
/** Opcode 0x26. */
FNIEMOP_DEF(iemOp_seg_ES)
{
    /* ES segment-override prefix: record it, then decode the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg es");
    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_ES;
    pIemCpu->iEffSeg = X86_SREG_ES;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}


/** Opcode 0x27. */
FNIEMOP_DEF(iemOp_daa)
{
    IEMOP_MNEMONIC("daa AL");
    IEMOP_HLP_NO_64BIT();       /* Not available in 64-bit mode. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* OF is undefined after DAA. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_daa);
}
7314
7315
/*
 * SUB - integer subtraction, opcodes 0x28..0x2d.
 */

/** Opcode 0x28. */
FNIEMOP_DEF(iemOp_sub_Eb_Gb)
{
    IEMOP_MNEMONIC("sub Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sub);
}


/** Opcode 0x29. */
FNIEMOP_DEF(iemOp_sub_Ev_Gv)
{
    IEMOP_MNEMONIC("sub Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sub);
}


/** Opcode 0x2a. */
FNIEMOP_DEF(iemOp_sub_Gb_Eb)
{
    IEMOP_MNEMONIC("sub Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sub);
}


/** Opcode 0x2b. */
FNIEMOP_DEF(iemOp_sub_Gv_Ev)
{
    IEMOP_MNEMONIC("sub Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sub);
}


/** Opcode 0x2c. */
FNIEMOP_DEF(iemOp_sub_Al_Ib)
{
    IEMOP_MNEMONIC("sub al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sub);
}


/** Opcode 0x2d. */
FNIEMOP_DEF(iemOp_sub_eAX_Iz)
{
    IEMOP_MNEMONIC("sub rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sub);
}
7362
7363
/** Opcode 0x2e. */
FNIEMOP_DEF(iemOp_seg_CS)
{
    /* CS segment-override prefix: record it, then decode the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg cs");
    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_CS;
    pIemCpu->iEffSeg = X86_SREG_CS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}


/** Opcode 0x2f. */
FNIEMOP_DEF(iemOp_das)
{
    IEMOP_MNEMONIC("das AL");
    IEMOP_HLP_NO_64BIT();       /* Not available in 64-bit mode. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* OF is undefined after DAS. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_das);
}
7385
7386
/*
 * XOR - bitwise exclusive or, opcodes 0x30..0x35.  AF is undefined after XOR.
 */

/** Opcode 0x30. */
FNIEMOP_DEF(iemOp_xor_Eb_Gb)
{
    IEMOP_MNEMONIC("xor Eb,Gb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_xor);
}


/** Opcode 0x31. */
FNIEMOP_DEF(iemOp_xor_Ev_Gv)
{
    IEMOP_MNEMONIC("xor Ev,Gv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_xor);
}


/** Opcode 0x32. */
FNIEMOP_DEF(iemOp_xor_Gb_Eb)
{
    IEMOP_MNEMONIC("xor Gb,Eb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_xor);
}


/** Opcode 0x33. */
FNIEMOP_DEF(iemOp_xor_Gv_Ev)
{
    IEMOP_MNEMONIC("xor Gv,Ev");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_xor);
}


/** Opcode 0x34. */
FNIEMOP_DEF(iemOp_xor_Al_Ib)
{
    IEMOP_MNEMONIC("xor al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_xor);
}


/** Opcode 0x35. */
FNIEMOP_DEF(iemOp_xor_eAX_Iz)
{
    IEMOP_MNEMONIC("xor rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_xor);
}
7439
7440
/** Opcode 0x36. */
FNIEMOP_DEF(iemOp_seg_SS)
{
    /* SS segment-override prefix: record it, then decode the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ss");
    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_SS;
    pIemCpu->iEffSeg = X86_SREG_SS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}


/** Opcode 0x37 - AAA.  Not implemented yet (stub raises/asserts via FNIEMOP_STUB). */
FNIEMOP_STUB(iemOp_aaa);
7455
7456
/*
 * CMP - compare (subtract discarding the result), opcodes 0x38..0x3d.
 */

/** Opcode 0x38. */
FNIEMOP_DEF(iemOp_cmp_Eb_Gb)
{
    IEMOP_MNEMONIC("cmp Eb,Gb");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo do we have to decode the whole instruction first? */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_cmp);
}


/** Opcode 0x39. */
FNIEMOP_DEF(iemOp_cmp_Ev_Gv)
{
    IEMOP_MNEMONIC("cmp Ev,Gv");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo do we have to decode the whole instruction first? */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_cmp);
}


/** Opcode 0x3a. */
FNIEMOP_DEF(iemOp_cmp_Gb_Eb)
{
    IEMOP_MNEMONIC("cmp Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_cmp);
}


/** Opcode 0x3b. */
FNIEMOP_DEF(iemOp_cmp_Gv_Ev)
{
    IEMOP_MNEMONIC("cmp Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_cmp);
}


/** Opcode 0x3c. */
FNIEMOP_DEF(iemOp_cmp_Al_Ib)
{
    IEMOP_MNEMONIC("cmp al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_cmp);
}


/** Opcode 0x3d. */
FNIEMOP_DEF(iemOp_cmp_eAX_Iz)
{
    IEMOP_MNEMONIC("cmp rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_cmp);
}
7505
7506
/** Opcode 0x3e. */
FNIEMOP_DEF(iemOp_seg_DS)
{
    /* DS segment-override prefix: record it, then decode the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ds");
    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_DS;
    pIemCpu->iEffSeg = X86_SREG_DS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}


/** Opcode 0x3f - AAS.  Not implemented yet (stub raises/asserts via FNIEMOP_STUB). */
FNIEMOP_STUB(iemOp_aas);
7521
/**
 * Common 'inc/dec/not/neg register' helper.
 *
 * Picks the worker matching the current effective operand size from @a pImpl
 * and applies it directly to the referenced general register and EFLAGS.
 *
 * @param   pImpl   The unary operator implementation table.
 * @param   iReg    The general register index (X86_GREG_XXX).
 */
FNIEMOP_DEF_2(iemOpCommonUnaryGReg, PCIEMOPUNARYSIZES, pImpl, uint8_t, iReg)
{
    IEMOP_HLP_NO_LOCK_PREFIX(); /* Register-operand form; LOCK is not valid. */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint16_t *, pu16Dst, 0);
            IEM_MC_ARG(uint32_t *, pEFlags, 1);
            IEM_MC_REF_GREG_U16(pu16Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
            IEM_MC_ARG(uint32_t *, pEFlags, 1);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
            /* 32-bit writes zero the upper half of the 64-bit register. */
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint64_t *, pu64Dst, 0);
            IEM_MC_ARG(uint32_t *, pEFlags, 1);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
    }
    /* All IEMMODE values are handled above; unreachable fallback for the compiler. */
    return VINF_SUCCESS;
}
7566
7567
/*
 * Opcodes 0x40..0x47 - INC r16/r32 in legacy/compat modes; in 64-bit mode
 * these bytes are the REX prefixes.  Each handler records the REX bits it
 * encodes (R/X/B, pre-shifted by 3 so they OR straight into register
 * indexes) and then re-dispatches on the next opcode byte.
 */

/** Opcode 0x40. */
FNIEMOP_DEF(iemOp_inc_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eAX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xAX);
}


/** Opcode 0x41. */
FNIEMOP_DEF(iemOp_inc_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode (REX.B).
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.b");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B;
        pIemCpu->uRexB = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eCX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xCX);
}


/** Opcode 0x42. */
FNIEMOP_DEF(iemOp_inc_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode (REX.X).
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.x");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X;
        pIemCpu->uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eDX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDX);
}



/** Opcode 0x43. */
FNIEMOP_DEF(iemOp_inc_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode (REX.B + REX.X).
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bx");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pIemCpu->uRexB     = 1 << 3;
        pIemCpu->uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eBX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBX);
}


/** Opcode 0x44. */
FNIEMOP_DEF(iemOp_inc_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode (REX.R).
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.r");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R;
        pIemCpu->uRexReg = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eSP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSP);
}


/** Opcode 0x45. */
FNIEMOP_DEF(iemOp_inc_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode (REX.R + REX.B).
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rb");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B;
        pIemCpu->uRexReg = 1 << 3;
        pIemCpu->uRexB   = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eBP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBP);
}


/** Opcode 0x46. */
FNIEMOP_DEF(iemOp_inc_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode (REX.R + REX.X).
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rx");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X;
        pIemCpu->uRexReg   = 1 << 3;
        pIemCpu->uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eSI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSI);
}


/** Opcode 0x47. */
FNIEMOP_DEF(iemOp_inc_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode (REX.R + REX.B + REX.X).
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbx");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pIemCpu->uRexReg   = 1 << 3;
        pIemCpu->uRexB     = 1 << 3;
        pIemCpu->uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eDI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDI);
}
7739
7740
/*
 * Opcodes 0x48..0x4f - DEC r16/r32 in legacy/compat modes; in 64-bit mode
 * these are the REX.W-carrying prefixes.  Besides the R/X/B bits, each sets
 * REX.W and recalculates the effective operand size.
 */

/** Opcode 0x48. */
FNIEMOP_DEF(iemOp_dec_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode (REX.W).
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.w");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_SIZE_REX_W;
        iemRecalEffOpSize(pIemCpu); /* REX.W changes the effective operand size. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eAX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xAX);
}


/** Opcode 0x49. */
FNIEMOP_DEF(iemOp_dec_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode (REX.B + REX.W).
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bw");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexB = 1 << 3;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eCX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xCX);
}


/** Opcode 0x4a. */
FNIEMOP_DEF(iemOp_dec_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode (REX.X + REX.W).
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.xw");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexIndex = 1 << 3;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eDX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDX);
}


/** Opcode 0x4b. */
FNIEMOP_DEF(iemOp_dec_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode (REX.B + REX.X + REX.W).
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bxw");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexB     = 1 << 3;
        pIemCpu->uRexIndex = 1 << 3;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eBX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBX);
}


/** Opcode 0x4c. */
FNIEMOP_DEF(iemOp_dec_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode (REX.R + REX.W).
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rw");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexReg = 1 << 3;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eSP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSP);
}


/** Opcode 0x4d. */
FNIEMOP_DEF(iemOp_dec_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode (REX.R + REX.B + REX.W).
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbw");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexReg = 1 << 3;
        pIemCpu->uRexB   = 1 << 3;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eBP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBP);
}


/** Opcode 0x4e. */
FNIEMOP_DEF(iemOp_dec_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode (REX.R + REX.X + REX.W).
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rxw");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexReg   = 1 << 3;
        pIemCpu->uRexIndex = 1 << 3;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eSI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSI);
}


/** Opcode 0x4f. */
FNIEMOP_DEF(iemOp_dec_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode (REX.R + REX.B + REX.X + REX.W).
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbxw");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexReg   = 1 << 3;
        pIemCpu->uRexB     = 1 << 3;
        pIemCpu->uRexIndex = 1 << 3;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eDI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDI);
}
7919
7920
/**
 * Common 'push register' helper.
 *
 * In 64-bit mode the REX.B extension is folded into the register index, the
 * default operand size becomes 64-bit, and the operand-size prefix selects
 * 16-bit - there is no 32-bit push in long mode.
 *
 * @param   iReg    The general register index (X86_GREG_XXX).
 */
FNIEMOP_DEF_1(iemOpCommonPushGReg, uint8_t, iReg)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        iReg |= pIemCpu->uRexB; /* uRexB is pre-shifted, so a plain OR extends the index. */
        pIemCpu->enmDefOpSize = IEMMODE_64BIT;
        pIemCpu->enmEffOpSize = !(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Value);
            IEM_MC_FETCH_GREG_U16(u16Value, iReg);
            IEM_MC_PUSH_U16(u16Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U32(u32Value, iReg);
            IEM_MC_PUSH_U32(u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U64(u64Value, iReg);
            IEM_MC_PUSH_U64(u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
7966
7967
/*
 * PUSH r16/r32/r64, opcodes 0x50..0x53.  All defer to iemOpCommonPushGReg,
 * which also handles the 64-bit mode REX.B / operand-size quirks.
 */

/** Opcode 0x50. */
FNIEMOP_DEF(iemOp_push_eAX)
{
    IEMOP_MNEMONIC("push rAX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xAX);
}


/** Opcode 0x51. */
FNIEMOP_DEF(iemOp_push_eCX)
{
    IEMOP_MNEMONIC("push rCX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xCX);
}


/** Opcode 0x52. */
FNIEMOP_DEF(iemOp_push_eDX)
{
    IEMOP_MNEMONIC("push rDX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDX);
}


/** Opcode 0x53. */
FNIEMOP_DEF(iemOp_push_eBX)
{
    IEMOP_MNEMONIC("push rBX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBX);
}
7998
7999
/** Opcode 0x54. */
FNIEMOP_DEF(iemOp_push_eSP)
{
    IEMOP_MNEMONIC("push rSP");
#if IEM_CFG_TARGET_CPU == IEMTARGETCPU_DYNAMIC
    /* 8086/8088 quirk: this path pushes SP minus 2, i.e. the value SP has
       after the push, whereas later CPUs push the pre-push value (the
       common worker below). */
    if (pIemCpu->uTargetCpu == IEMTARGETCPU_8086)
    {
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint16_t, u16Value);
        IEM_MC_FETCH_GREG_U16(u16Value, X86_GREG_xSP);
        IEM_MC_SUB_LOCAL_U16(u16Value, 2);
        IEM_MC_PUSH_U16(u16Value);
        /* NOTE(review): IEM_MC_ADVANCE_RIP/IEM_MC_END presumably terminate
           this path so the common call below is not also executed - macro
           expansion not visible here, confirm. */
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
#endif
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSP);
}
8018
8019
/** Opcode 0x55. */
FNIEMOP_DEF(iemOp_push_eBP)
{
    IEMOP_MNEMONIC("push rBP");
    /* Push BP/EBP/RBP (operand-size dependent) via the common push worker. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBP);
}
8026
8027
/** Opcode 0x56. */
FNIEMOP_DEF(iemOp_push_eSI)
{
    IEMOP_MNEMONIC("push rSI");
    /* Push SI/ESI/RSI (operand-size dependent) via the common push worker. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSI);
}
8034
8035
/** Opcode 0x57. */
FNIEMOP_DEF(iemOp_push_eDI)
{
    IEMOP_MNEMONIC("push rDI");
    /* Push DI/EDI/RDI (operand-size dependent) via the common push worker. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDI);
}
8042
8043
8044/**
8045 * Common 'pop register' helper.
8046 */
8047FNIEMOP_DEF_1(iemOpCommonPopGReg, uint8_t, iReg)
8048{
8049 IEMOP_HLP_NO_LOCK_PREFIX();
8050 if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
8051 {
8052 iReg |= pIemCpu->uRexB;
8053 pIemCpu->enmDefOpSize = IEMMODE_64BIT;
8054 pIemCpu->enmEffOpSize = !(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
8055 }
8056
8057 switch (pIemCpu->enmEffOpSize)
8058 {
8059 case IEMMODE_16BIT:
8060 IEM_MC_BEGIN(0, 1);
8061 IEM_MC_LOCAL(uint16_t, *pu16Dst);
8062 IEM_MC_REF_GREG_U16(pu16Dst, iReg);
8063 IEM_MC_POP_U16(pu16Dst);
8064 IEM_MC_ADVANCE_RIP();
8065 IEM_MC_END();
8066 break;
8067
8068 case IEMMODE_32BIT:
8069 IEM_MC_BEGIN(0, 1);
8070 IEM_MC_LOCAL(uint32_t, *pu32Dst);
8071 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
8072 IEM_MC_POP_U32(pu32Dst);
8073 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /** @todo testcase*/
8074 IEM_MC_ADVANCE_RIP();
8075 IEM_MC_END();
8076 break;
8077
8078 case IEMMODE_64BIT:
8079 IEM_MC_BEGIN(0, 1);
8080 IEM_MC_LOCAL(uint64_t, *pu64Dst);
8081 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
8082 IEM_MC_POP_U64(pu64Dst);
8083 IEM_MC_ADVANCE_RIP();
8084 IEM_MC_END();
8085 break;
8086 }
8087
8088 return VINF_SUCCESS;
8089}
8090
8091
/** Opcode 0x58. */
FNIEMOP_DEF(iemOp_pop_eAX)
{
    IEMOP_MNEMONIC("pop rAX");
    /* Pop into AX/EAX/RAX (operand-size dependent) via the common pop worker. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xAX);
}
8098
8099
/** Opcode 0x59. */
FNIEMOP_DEF(iemOp_pop_eCX)
{
    IEMOP_MNEMONIC("pop rCX");
    /* Pop into CX/ECX/RCX (operand-size dependent) via the common pop worker. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xCX);
}
8106
8107
/** Opcode 0x5a. */
FNIEMOP_DEF(iemOp_pop_eDX)
{
    IEMOP_MNEMONIC("pop rDX");
    /* Pop into DX/EDX/RDX (operand-size dependent) via the common pop worker. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDX);
}
8114
8115
/** Opcode 0x5b. */
FNIEMOP_DEF(iemOp_pop_eBX)
{
    IEMOP_MNEMONIC("pop rBX");
    /* Pop into BX/EBX/RBX (operand-size dependent) via the common pop worker. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBX);
}
8122
8123
/** Opcode 0x5c.
 *
 * POP rSP is special: the popped value replaces SP itself, so it is handled
 * inline here instead of through iemOpCommonPopGReg (which writes the
 * destination via a register reference that would alias the stack pointer
 * being adjusted by the pop). */
FNIEMOP_DEF(iemOp_pop_eSP)
{
    IEMOP_MNEMONIC("pop rSP");
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        /* With REX.B the destination is R12 (xSP | uRexB in the common
           worker), not RSP, so the generic path works fine. */
        if (pIemCpu->uRexB)
            return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSP);
        pIemCpu->enmDefOpSize = IEMMODE_64BIT;
        pIemCpu->enmEffOpSize = !(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
                           DISOPTYPE_HARMLESS | DISOPTYPE_DEFAULT_64_OP_SIZE | DISOPTYPE_REXB_EXTENDS_OPREG);
    /** @todo add testcase for this instruction. */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Dst);
            IEM_MC_POP_U16(&u16Dst); /** @todo not correct MC, fix later. */
            IEM_MC_STORE_GREG_U16(X86_GREG_xSP, u16Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Dst);
            IEM_MC_POP_U32(&u32Dst);
            IEM_MC_STORE_GREG_U32(X86_GREG_xSP, u32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Dst);
            IEM_MC_POP_U64(&u64Dst);
            IEM_MC_STORE_GREG_U64(X86_GREG_xSP, u64Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
8171
8172
/** Opcode 0x5d. */
FNIEMOP_DEF(iemOp_pop_eBP)
{
    IEMOP_MNEMONIC("pop rBP");
    /* Pop into BP/EBP/RBP (operand-size dependent) via the common pop worker. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBP);
}
8179
8180
/** Opcode 0x5e. */
FNIEMOP_DEF(iemOp_pop_eSI)
{
    IEMOP_MNEMONIC("pop rSI");
    /* Pop into SI/ESI/RSI (operand-size dependent) via the common pop worker. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSI);
}
8187
8188
/** Opcode 0x5f. */
FNIEMOP_DEF(iemOp_pop_eDI)
{
    IEMOP_MNEMONIC("pop rDI");
    /* Pop into DI/EDI/RDI (operand-size dependent) via the common pop worker. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDI);
}
8195
8196
/** Opcode 0x60.
 * PUSHA/PUSHAD - push all general registers.  186+ only, invalid in 64-bit
 * mode; deferred to a C implementation per operand size. */
FNIEMOP_DEF(iemOp_pusha)
{
    IEMOP_MNEMONIC("pusha");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_NO_64BIT();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_16);
    Assert(pIemCpu->enmEffOpSize == IEMMODE_32BIT); /* 64-bit excluded above */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_32);
}
8208
8209
/** Opcode 0x61.
 * POPA/POPAD - pop all general registers.  186+ only, invalid in 64-bit
 * mode; deferred to a C implementation per operand size. */
FNIEMOP_DEF(iemOp_popa)
{
    IEMOP_MNEMONIC("popa");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_NO_64BIT();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_16);
    Assert(pIemCpu->enmEffOpSize == IEMMODE_32BIT); /* 64-bit excluded above */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_32);
}
8221
8222
/** Opcode 0x62.
 * BOUND Gv,Ma - not implemented yet (stub; presumably asserts/fails when
 * hit, see FNIEMOP_STUB). */
FNIEMOP_STUB(iemOp_bound_Gv_Ma_evex);
//  IEMOP_HLP_MIN_186();
8226
8227
/** Opcode 0x63 - non-64-bit modes.
 * ARPL Ew,Gw - adjust RPL of the selector in Ew up to the RPL in Gw.
 * 286+, protected mode only (real/V86 mode raises #UD via the helper). */
FNIEMOP_DEF(iemOp_arpl_Ew_Gw)
{
    IEMOP_MNEMONIC("arpl Ew,Gw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register */
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint16_t *, pu16Dst, 0);
        IEM_MC_ARG(uint16_t, u16Src, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        /* No REX in non-64-bit modes, so the raw reg/rm fields are the
           final register indexes. */
        IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* Memory */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *, pu16Dst, 0);
        IEM_MC_ARG(uint16_t, u16Src, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        /* Effective address must be calculated before decoding completes. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;

}
8277
8278
/** Opcode 0x63.
 * @note This is a weird one. It works like a regular move instruction if
 *       REX.W isn't set, at least according to AMD docs (rev 3.15, 2009-11).
 * @todo This definitely needs a testcase to verify the odd cases.  */
FNIEMOP_DEF(iemOp_movsxd_Gv_Ev)
{
    Assert(pIemCpu->enmEffOpSize == IEMMODE_64BIT); /* Caller branched already . */

    IEMOP_MNEMONIC("movsxd Gv,Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register to register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, u64Value);
        /* Sign-extend the 32-bit source register into the 64-bit destination. */
        IEM_MC_FETCH_GREG_U32_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, u64Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U32_SX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
8320
8321
/** Opcode 0x64.
 * FS segment-override prefix (386+): records the prefix, sets the effective
 * segment, then decodes and dispatches the next opcode byte. */
FNIEMOP_DEF(iemOp_seg_FS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg fs");
    IEMOP_HLP_MIN_386();

    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_FS;
    pIemCpu->iEffSeg = X86_SREG_FS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
8334
8335
/** Opcode 0x65.
 * GS segment-override prefix (386+): records the prefix, sets the effective
 * segment, then decodes and dispatches the next opcode byte. */
FNIEMOP_DEF(iemOp_seg_GS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg gs");
    IEMOP_HLP_MIN_386();

    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_GS;
    pIemCpu->iEffSeg = X86_SREG_GS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
8348
8349
/** Opcode 0x66.
 * Operand-size override prefix (386+): records the prefix, recalculates the
 * effective operand size, then dispatches the next opcode byte. */
FNIEMOP_DEF(iemOp_op_size)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("op size");
    IEMOP_HLP_MIN_386();

    pIemCpu->fPrefixes |= IEM_OP_PRF_SIZE_OP;
    iemRecalEffOpSize(pIemCpu);

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
8362
8363
/** Opcode 0x67.
 * Address-size override prefix (386+): toggles the effective address mode
 * relative to the default (16<->32; 64-bit default drops to 32-bit), then
 * dispatches the next opcode byte. */
FNIEMOP_DEF(iemOp_addr_size)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("addr size");
    IEMOP_HLP_MIN_386();

    pIemCpu->fPrefixes |= IEM_OP_PRF_SIZE_ADDR;
    switch (pIemCpu->enmDefAddrMode)
    {
        case IEMMODE_16BIT: pIemCpu->enmEffAddrMode = IEMMODE_32BIT; break;
        case IEMMODE_32BIT: pIemCpu->enmEffAddrMode = IEMMODE_16BIT; break;
        case IEMMODE_64BIT: pIemCpu->enmEffAddrMode = IEMMODE_32BIT; break;
        default: AssertFailed();
    }

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
8382
8383
/** Opcode 0x68.
 * PUSH Iz - push an immediate of the effective operand size (186+).  In
 * 64-bit mode the immediate is 32-bit, sign-extended to 64. */
FNIEMOP_DEF(iemOp_push_Iz)
{
    IEMOP_MNEMONIC("push Iz");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U16(u16Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U32(u32Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            /* imm32 sign-extended to 64 bits. */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U64(u64Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
8428
8429
/** Opcode 0x69.
 * IMUL Gv,Ev,Iz - three-operand signed multiply with a full-size immediate
 * (186+).  The result is computed in a local and stored to the Gv register;
 * the assembly worker updates EFLAGS. */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Iz)
{
    IEMOP_MNEMONIC("imul Gv,Ev,Iz"); /* Gv = Ev * Iz; */
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* SF/ZF/AF/PF are treated as undefined outputs for verification. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);

                IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* 2 = size of the trailing imm16 for EA displacement decoding. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);

                IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* 4 = size of the trailing imm32. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                /* imm32 sign-extended to 64 bits. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);

                IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* 4 = size of the trailing imm32 (sign-extended below). */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }
    }
    AssertFailedReturn(VERR_IEM_IPE_9);
}
8589
8590
/** Opcode 0x6a.
 * PUSH Ib - push a sign-extended 8-bit immediate (186+), widened to the
 * effective operand size. */
FNIEMOP_DEF(iemOp_push_Ib)
{
    IEMOP_MNEMONIC("push Ib");
    IEMOP_HLP_MIN_186();
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0,0);
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_PUSH_U16(i8Imm); /* i8Imm is signed, so it sign-extends here */
            break;
        case IEMMODE_32BIT:
            IEM_MC_PUSH_U32(i8Imm);
            break;
        case IEMMODE_64BIT:
            IEM_MC_PUSH_U64(i8Imm);
            break;
    }
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8617
8618
/** Opcode 0x6b.
 * IMUL Gv,Ev,Ib - three-operand signed multiply with a sign-extended 8-bit
 * immediate (186+). */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Ib)
{
    IEMOP_MNEMONIC("imul Gv,Ev,Ib"); /* Gv = Ev * Ib; */
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* SF/ZF/AF/PF are treated as undefined outputs for verification. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                /* (int8_t) cast sign-extends the immediate to the operand size. */
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);

                IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* 1 = size of the trailing imm8 for EA displacement decoding. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_S8_SX_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);

                IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_S8_SX_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);

                IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S8_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
    }
    AssertFailedReturn(VERR_IEM_IPE_8);
}
8772
8773
/** Opcode 0x6c.
 * INSB - input byte(s) from port DX to ES:[rDI] (186+).  Dispatches to a C
 * implementation selected by REP prefix and effective address size. */
FNIEMOP_DEF(iemOp_insb_Yb_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep ins Yb,DX");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr16, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr32, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC("ins Yb,DX");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr16, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr32, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8802
8803
/** Opcode 0x6d.
 * INSW/INSD - input word/dword(s) from port DX to ES:[rDI] (186+).
 * Dispatches on REP prefix, effective operand size (64-bit shares the
 * 32-bit workers) and effective address size. */
FNIEMOP_DEF(iemOp_inswd_Yv_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC("rep ins Yv,DX");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* no 64-bit port I/O; treated as 32-bit */
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC("ins Yv,DX");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* no 64-bit port I/O; treated as 32-bit */
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8864
8865
/** Opcode 0x6e.
 * OUTSB - output byte(s) from DS:[rSI] (segment overridable, hence iEffSeg)
 * to port DX (186+).  Dispatches on REP prefix and effective address size. */
FNIEMOP_DEF(iemOp_outsb_Yb_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep outs DX,Yb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr16, pIemCpu->iEffSeg, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr32, pIemCpu->iEffSeg, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr64, pIemCpu->iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC("outs DX,Yb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr16, pIemCpu->iEffSeg, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr32, pIemCpu->iEffSeg, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr64, pIemCpu->iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8894
8895
/** Opcode 0x6f.
 * OUTSW/OUTSD - output word/dword(s) from the effective segment:[rSI] to
 * port DX (186+).  Dispatches on REP prefix, effective operand size
 * (64-bit shares the 32-bit workers) and effective address size. */
FNIEMOP_DEF(iemOp_outswd_Yv_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC("rep outs DX,Yv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr16, pIemCpu->iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr32, pIemCpu->iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr64, pIemCpu->iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* no 64-bit port I/O; treated as 32-bit */
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr16, pIemCpu->iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr32, pIemCpu->iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr64, pIemCpu->iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC("outs DX,Yv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr16, pIemCpu->iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr32, pIemCpu->iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr64, pIemCpu->iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* no 64-bit port I/O; treated as 32-bit */
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr16, pIemCpu->iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr32, pIemCpu->iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr64, pIemCpu->iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8956
8957
/** Opcode 0x70.
 * JO rel8 - jump short if OF=1. */
FNIEMOP_DEF(iemOp_jo_Jb)
{
    IEMOP_MNEMONIC("jo Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* near branches default to 64-bit op size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8975
8976
/** Opcode 0x71.
 * JNO rel8 - jump short if OF=0 (branch taken in the ELSE arm). */
FNIEMOP_DEF(iemOp_jno_Jb)
{
    IEMOP_MNEMONIC("jno Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* near branches default to 64-bit op size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8994
/** Opcode 0x72.
 * JC/JB/JNAE rel8 - jump short if CF=1. */
FNIEMOP_DEF(iemOp_jc_Jb)
{
    IEMOP_MNEMONIC("jc/jnae Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* near branches default to 64-bit op size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9012
9013
/** Opcode 0x73.
 * JNC/JNB/JAE rel8 - jump short if CF=0 (branch taken in the ELSE arm). */
FNIEMOP_DEF(iemOp_jnc_Jb)
{
    IEMOP_MNEMONIC("jnc/jnb Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* near branches default to 64-bit op size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9031
9032
/** Opcode 0x74.
 * JE/JZ rel8 - jump short if ZF=1. */
FNIEMOP_DEF(iemOp_je_Jb)
{
    IEMOP_MNEMONIC("je/jz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* near branches default to 64-bit op size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9050
9051
/** Opcode 0x75 - jne/jnz Jb: jump short if not equal/not zero (ZF=0). */
FNIEMOP_DEF(iemOp_jne_Jb)
{
    IEMOP_MNEMONIC("jne/jnz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* short Jcc defaults to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    /* Inverted condition: the jump is taken in the ELSE (ZF clear) branch. */
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9069
9070
/** Opcode 0x76 - jbe/jna Jb: jump short if below or equal (CF=1 or ZF=1). */
FNIEMOP_DEF(iemOp_jbe_Jb)
{
    IEMOP_MNEMONIC("jbe/jna Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* short Jcc defaults to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9088
9089
/** Opcode 0x77 - jnbe/ja Jb: jump short if above (CF=0 and ZF=0). */
FNIEMOP_DEF(iemOp_jnbe_Jb)
{
    IEMOP_MNEMONIC("jnbe/ja Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* short Jcc defaults to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    /* Inverted condition: the jump is taken in the ELSE (both flags clear) branch. */
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9107
9108
/** Opcode 0x78 - js Jb: jump short if sign (SF=1). */
FNIEMOP_DEF(iemOp_js_Jb)
{
    IEMOP_MNEMONIC("js Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* short Jcc defaults to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9126
9127
/** Opcode 0x79 - jns Jb: jump short if not sign (SF=0). */
FNIEMOP_DEF(iemOp_jns_Jb)
{
    IEMOP_MNEMONIC("jns Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* short Jcc defaults to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    /* Inverted condition: the jump is taken in the ELSE (SF clear) branch. */
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9145
9146
/** Opcode 0x7a - jp/jpe Jb: jump short if parity even (PF=1). */
FNIEMOP_DEF(iemOp_jp_Jb)
{
    IEMOP_MNEMONIC("jp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* short Jcc defaults to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9164
9165
/** Opcode 0x7b - jnp/jpo Jb: jump short if parity odd (PF=0). */
FNIEMOP_DEF(iemOp_jnp_Jb)
{
    IEMOP_MNEMONIC("jnp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* short Jcc defaults to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    /* Inverted condition: the jump is taken in the ELSE (PF clear) branch. */
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9183
9184
/** Opcode 0x7c - jl/jnge Jb: jump short if less (SF != OF). */
FNIEMOP_DEF(iemOp_jl_Jb)
{
    IEMOP_MNEMONIC("jl/jnge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* short Jcc defaults to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9202
9203
/** Opcode 0x7d - jnl/jge Jb: jump short if greater or equal (SF == OF). */
FNIEMOP_DEF(iemOp_jnl_Jb)
{
    IEMOP_MNEMONIC("jnl/jge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* short Jcc defaults to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    /* Inverted condition: the jump is taken in the ELSE (SF == OF) branch. */
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9221
9222
/** Opcode 0x7e - jle/jng Jb: jump short if less or equal (ZF=1 or SF != OF). */
FNIEMOP_DEF(iemOp_jle_Jb)
{
    IEMOP_MNEMONIC("jle/jng Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* short Jcc defaults to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9240
9241
/** Opcode 0x7f - jnle/jg Jb: jump short if greater (ZF=0 and SF == OF). */
FNIEMOP_DEF(iemOp_jnle_Jb)
{
    IEMOP_MNEMONIC("jnle/jg Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* short Jcc defaults to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    /* Inverted condition: the jump is taken in the ELSE (ZF=0 and SF==OF) branch. */
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9259
9260
/**
 * Opcode 0x80 - Group 1: ADD/OR/ADC/SBB/AND/SUB/XOR/CMP Eb,Ib.
 *
 * The ModR/M reg field selects the operation from g_apIemImplGrp1.  CMP is
 * the only member without a locked worker (pfnLockedU8 == NULL), so it is
 * the read-only case below.
 */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_80)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Select the mnemonic from a packed \0-separated string table (4 bytes per entry). */
    IEMOP_MNEMONIC2("add\0or\0\0adc\0sbb\0and\0sub\0xor\0cmp" + ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)*4, "Eb,Ib");
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target - LOCK is only valid with a memory destination. */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,       pu8Dst,                 0);
        IEM_MC_ARG_CONST(uint8_t,   u8Src, /*=*/ u8Imm,     1);
        IEM_MC_ARG(uint32_t *,      pEFlags,                2);

        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        uint32_t fAccess;
        if (pImpl->pfnLockedU8)
            fAccess = IEM_ACCESS_DATA_RW;
        else
        {   /* CMP - read-only destination, LOCK not allowed. */
            IEMOP_HLP_NO_LOCK_PREFIX();
            fAccess = IEM_ACCESS_DATA_R;
        }
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,       pu8Dst,                 0);
        IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        /* One immediate byte still follows the ModR/M + displacement, hence the 1. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_ARG_CONST(uint8_t,   u8Src, /*=*/ u8Imm,     1);

        IEM_MC_MEM_MAP(pu8Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
9319
9320
/**
 * Opcode 0x81 - Group 1: ADD/OR/ADC/SBB/AND/SUB/XOR/CMP Ev,Iz.
 *
 * Full-size immediate variant; in 64-bit operand size the immediate is a
 * sign-extended 32-bit value.  The ModR/M reg field selects the operation
 * from g_apIemImplGrp1; CMP has no locked worker and is treated read-only.
 */
FNIEMOP_DEF(iemOp_Grp1_Ev_Iz)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Select the mnemonic from a packed \0-separated string table (4 bytes per entry). */
    IEMOP_MNEMONIC2("add\0or\0\0adc\0sbb\0and\0sub\0xor\0cmp" + ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)*4, "Ev,Iz");
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register target - LOCK is only valid with a memory destination. */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                0);
                IEM_MC_ARG_CONST(uint16_t,  u16Src, /*=*/ u16Imm,   1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                uint32_t fAccess;
                if (pImpl->pfnLockedU16)
                    fAccess = IEM_ACCESS_DATA_RW;
                else
                {   /* CMP, TEST - read-only destination, LOCK not allowed. */
                    IEMOP_HLP_NO_LOCK_PREFIX();
                    fAccess = IEM_ACCESS_DATA_R;
                }
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                0);
                IEM_MC_ARG(uint16_t,        u16Src,                 1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* Two immediate bytes still follow the ModR/M + displacement. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_32BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register target - LOCK is only valid with a memory destination. */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                0);
                IEM_MC_ARG_CONST(uint32_t,  u32Src, /*=*/ u32Imm,   1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                /* 32-bit register writes zero the upper half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                uint32_t fAccess;
                if (pImpl->pfnLockedU32)
                    fAccess = IEM_ACCESS_DATA_RW;
                else
                {   /* CMP, TEST - read-only destination, LOCK not allowed. */
                    IEMOP_HLP_NO_LOCK_PREFIX();
                    fAccess = IEM_ACCESS_DATA_R;
                }
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                0);
                IEM_MC_ARG(uint32_t,        u32Src,                 1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* Four immediate bytes still follow the ModR/M + displacement. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_64BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register target - the immediate is 32 bits, sign-extended to 64. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                0);
                IEM_MC_ARG_CONST(uint64_t,  u64Src, /*=*/ u64Imm,   1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                uint32_t fAccess;
                if (pImpl->pfnLockedU64)
                    fAccess = IEM_ACCESS_DATA_RW;
                else
                {   /* CMP - read-only destination, LOCK not allowed. */
                    IEMOP_HLP_NO_LOCK_PREFIX();
                    fAccess = IEM_ACCESS_DATA_R;
                }
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                0);
                IEM_MC_ARG(uint64_t,        u64Src,                 1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* Four immediate bytes (sign-extended to 64) still follow the ModR/M. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            break;
        }
    }
    return VINF_SUCCESS;
}
9495
9496
/**
 * Opcode 0x82 - alias of opcode 0x80 (Group 1 Eb,Ib), invalid in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_82)
{
    IEMOP_HLP_NO_64BIT(); /** @todo do we need to decode the whole instruction or is this ok? */
    return FNIEMOP_CALL(iemOp_Grp1_Eb_Ib_80);
}
9503
9504
/**
 * Opcode 0x83 - Group 1: ADD/OR/ADC/SBB/AND/SUB/XOR/CMP Ev,Ib.
 *
 * Sign-extended byte immediate variant; the single immediate byte is
 * sign-extended to the effective operand size before the operation.
 */
FNIEMOP_DEF(iemOp_Grp1_Ev_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Select the mnemonic from a packed \0-separated string table (4 bytes per entry). */
    IEMOP_MNEMONIC2("add\0or\0\0adc\0sbb\0and\0sub\0xor\0cmp" + ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)*4, "Ev,Ib");
    /* Note! Seems the OR, AND, and XOR instructions are present on CPUs prior
       to the 386 even if absent in the intel reference manuals and some
       3rd party opcode listings. */
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register target - LOCK is only valid with a memory destination.
         */
        IEMOP_HLP_NO_LOCK_PREFIX();
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                    0);
                /* (int8_t) cast sign-extends the immediate to operand size. */
                IEM_MC_ARG_CONST(uint16_t,  u16Src, /*=*/ (int8_t)u8Imm,1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                    0);
                IEM_MC_ARG_CONST(uint32_t,  u32Src, /*=*/ (int8_t)u8Imm,1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                /* 32-bit register writes zero the upper half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                    0);
                IEM_MC_ARG_CONST(uint64_t,  u64Src, /*=*/ (int8_t)u8Imm,1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }
        }
    }
    else
    {
        /*
         * Memory target.
         */
        uint32_t fAccess;
        if (pImpl->pfnLockedU16)
            fAccess = IEM_ACCESS_DATA_RW;
        else
        {   /* CMP - read-only destination, LOCK not allowed. */
            IEMOP_HLP_NO_LOCK_PREFIX();
            fAccess = IEM_ACCESS_DATA_R;
        }

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                    0);
                IEM_MC_ARG(uint16_t,        u16Src,                     1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,            2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* One immediate byte still follows the ModR/M + displacement. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm); /* sign-extend to operand size */
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                    0);
                IEM_MC_ARG(uint32_t,        u32Src,                     1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,            2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm); /* sign-extend to operand size */
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                    0);
                IEM_MC_ARG(uint64_t,        u64Src,                     1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,            2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm); /* sign-extend to operand size */
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }
        }
    }
    return VINF_SUCCESS;
}
9668
9669
/**
 * Opcode 0x84 - test Eb,Gb.  AND without writeback; AF is architecturally
 * undefined, hence the verification exclusion.
 */
FNIEMOP_DEF(iemOp_test_Eb_Gb)
{
    IEMOP_MNEMONIC("test Eb,Gb");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo do we have to decode the whole instruction first? */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_test);
}
9678
9679
/**
 * Opcode 0x85 - test Ev,Gv.  AND without writeback; AF is architecturally
 * undefined, hence the verification exclusion.
 */
FNIEMOP_DEF(iemOp_test_Ev_Gv)
{
    IEMOP_MNEMONIC("test Ev,Gv");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo do we have to decode the whole instruction first? */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_test);
}
9688
9689
/** Opcode 0x86 - xchg Eb,Gb: exchange byte register with r/m8. */
FNIEMOP_DEF(iemOp_xchg_Eb_Gb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC("xchg Eb,Gb");

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX();

        /* Plain register swap via two temporaries. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, uTmp1);
        IEM_MC_LOCAL(uint8_t, uTmp2);

        IEM_MC_FETCH_GREG_U8(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_FETCH_GREG_U8(uTmp2, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, uTmp1);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uTmp2);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.  The assembly worker swaps through the
         * mapped memory byte and the register reference.
         */
/** @todo the register must be committed separately! */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint8_t *,  pu8Mem,           0);
        IEM_MC_ARG(uint8_t *,  pu8Reg,           1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Mem, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8, pu8Mem, pu8Reg);
        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Mem, IEM_ACCESS_DATA_RW);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
9737
9738
/** Opcode 0x87 - xchg Ev,Gv: exchange register with r/m, 16/32/64-bit. */
FNIEMOP_DEF(iemOp_xchg_Ev_Gv)
{
    IEMOP_MNEMONIC("xchg Ev,Gv");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX();

        /* Plain register swap via two temporaries, per operand size. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, uTmp1);
                IEM_MC_LOCAL(uint16_t, uTmp2);

                IEM_MC_FETCH_GREG_U16(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U16(uTmp2, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, uTmp1);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, uTmp1);
                IEM_MC_LOCAL(uint32_t, uTmp2);

                IEM_MC_FETCH_GREG_U32(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U32(uTmp2, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, uTmp1);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, uTmp1);
                IEM_MC_LOCAL(uint64_t, uTmp2);

                IEM_MC_FETCH_GREG_U64(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U64(uTmp2, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, uTmp1);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory.  The assembly worker swaps through the
         * mapped memory operand and the register reference.
         */
        switch (pIemCpu->enmEffOpSize)
        {
/** @todo the register must be committed separately! */
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint16_t *,  pu16Mem,          0);
                IEM_MC_ARG(uint16_t *,  pu16Reg,          1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu16Mem, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16, pu16Mem, pu16Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Mem, IEM_ACCESS_DATA_RW);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint32_t *,  pu32Mem,          0);
                IEM_MC_ARG(uint32_t *,  pu32Reg,          1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu32Mem, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32, pu32Mem, pu32Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Mem, IEM_ACCESS_DATA_RW);

                /* 32-bit register writes zero the upper half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint64_t *,  pu64Mem,          0);
                IEM_MC_ARG(uint64_t *,  pu64Reg,          1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu64Mem, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64, pu64Mem, pu64Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Mem, IEM_ACCESS_DATA_RW);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9860
9861
/** Opcode 0x88 - mov Eb,Gb: store byte register into r/m8. */
FNIEMOP_DEF(iemOp_mov_Eb_Gb)
{
    IEMOP_MNEMONIC("mov Eb,Gb");

    uint8_t bRm;
    IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register to register */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_STORE_MEM_U8(pIemCpu->iEffSeg, GCPtrEffDst, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;

}
9900
9901
/** Opcode 0x89 - mov Ev,Gv: store register into r/m, 16/32/64-bit. */
FNIEMOP_DEF(iemOp_mov_Ev_Gv)
{
    IEMOP_MNEMONIC("mov Ev,Gv");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register to register, per operand size */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrEffDst, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffDst, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
9988
9989
/** Opcode 0x8a - mov Gb,Eb: load byte register from r/m8. */
FNIEMOP_DEF(iemOp_mov_Gb_Eb)
{
    IEMOP_MNEMONIC("mov Gb,Eb");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register to register */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_MEM_U8(u8Value, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
10026
10027
/** Opcode 0x8b - mov Gv,Ev: load register from r/m, 16/32/64-bit. */
FNIEMOP_DEF(iemOp_mov_Gv_Ev)
{
    IEMOP_MNEMONIC("mov Gv,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register to register, per operand size */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
10114
10115
10116/** Opcode 0x63. */
10117FNIEMOP_DEF(iemOp_arpl_Ew_Gw_movsx_Gv_Ev)
10118{
10119 if (pIemCpu->enmCpuMode != IEMMODE_64BIT)
10120 return FNIEMOP_CALL(iemOp_arpl_Ew_Gw);
10121 if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
10122 return FNIEMOP_CALL(iemOp_mov_Gv_Ev);
10123 return FNIEMOP_CALL(iemOp_movsxd_Gv_Ev);
10124}
10125
10126
/** Opcode 0x8c. - MOV Ev,Sw: store a segment register to r/m. */
FNIEMOP_DEF(iemOp_mov_Ev_Sw)
{
    IEMOP_MNEMONIC("mov Ev,Sw");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * Check that the destination register exists. The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
    if (   iSegReg > X86_SREG_GS)
        return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     * In that case, the operand size is respected and the upper bits are
     * cleared (starting with some pentium).
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                /* Zero-extended store clears the upper GPR bits. */
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_SREG_ZX_U32(u32Value, iSegReg);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_SREG_ZX_U64(u64Value, iSegReg);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're saving the register to memory. The access is word sized
         * regardless of operand size prefixes.
         */
#if 0 /* not necessary */
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_16BIT;
#endif
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t,  u16Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
        IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
10199
10200
10201
10202
/** Opcode 0x8d. - LEA Gv,M: store the effective address (truncated to the
 *  effective operand size) in a general register.  Register-form (mod=3)
 *  encodings are invalid. */
FNIEMOP_DEF(iemOp_lea_Gv_M)
{
    IEMOP_MNEMONIC("lea Gv,M");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE(); /* no register form */

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            /* 16-bit operand size: only the low word of the EA is kept. */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint16_t, u16Cast);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEM_MC_ASSIGN_TO_SMALLER(u16Cast, GCPtrEffSrc);
            IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Cast);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Cast);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEM_MC_ASSIGN_TO_SMALLER(u32Cast, GCPtrEffSrc);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Cast);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            /* 64-bit: store the full effective address, no truncation. */
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, GCPtrEffSrc);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
    }
    AssertFailedReturn(VERR_IEM_IPE_7);
}
10247
10248
/** Opcode 0x8e. - MOV Sw,Ev: load a segment register from r/m.  CS is not a
 *  valid destination; descriptor/permission checks are done by
 *  iemCImpl_load_SReg. */
FNIEMOP_DEF(iemOp_mov_Sw_Ev)
{
    IEMOP_MNEMONIC("mov Sw,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * The practical operand size is 16-bit.
     */
#if 0 /* not necessary */
    pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_16BIT;
#endif

    /*
     * Check that the destination register exists and can be used with this
     * instruction. The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
    if (   iSegReg == X86_SREG_CS
        || iSegReg > X86_SREG_GS)
        return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
        IEM_MC_ARG(uint16_t,      u16Value,          1);
        IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading the register from memory. The access is word sized
         * regardless of operand size prefixes.
         */
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
        IEM_MC_ARG(uint16_t,      u16Value,          1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_MEM_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
10302
10303
/** Opcode 0x8f /0. - POP Ev.
 *
 * @param   bRm     The ModR/M byte, already fetched by the caller (iemOp_Grp1A).
 */
FNIEMOP_DEF_1(iemOp_pop_Ev, uint8_t, bRm)
{
    /* This bugger is rather annoying as it requires rSP to be updated before
       doing the effective address calculations.  Will eventually require a
       split between the R/M+SIB decoding and the effective address
       calculation - which is something that is required for any attempt at
       reusing this code for a recompiler.  It may also be good to have if we
       need to delay #UD exception caused by invalid lock prefixes.

       For now, we'll do a mostly safe interpreter-only implementation here. */
    /** @todo What's the deal with the 'reg' field and pop Ev?  Ignorning it for
     *        now until tests show it's checked.. */
    IEMOP_MNEMONIC("pop Ev");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /* Register access is relatively easy and can share code. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(iemOpCommonPopGReg, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);

    /*
     * Memory target.
     *
     * Intel says that RSP is incremented before it's used in any effective
     * address calcuations.  This means some serious extra annoyance here since
     * we decode and calculate the effective address in one step and like to
     * delay committing registers till everything is done.
     *
     * So, we'll decode and calculate the effective address twice.  This will
     * require some recoding if turned into a recompiler.
     */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* The common code does this differently. */

#ifndef TST_IEM_CHECK_MC
    /* Calc effective address with modified ESP. */
    /* First pass only validates the encoding and consumes the displacement
       bytes; the opcode read position is rewound afterwards so the second
       pass (with rSP bumped) sees the same bytes. */
    uint8_t const   offOpcodeSaved = pIemCpu->offOpcode;
    RTGCPTR         GCPtrEff;
    VBOXSTRICTRC    rcStrict;
    rcStrict = iemOpHlpCalcRmEffAddr(pIemCpu, bRm, 0, &GCPtrEff);
    if (rcStrict != VINF_SUCCESS)
        return rcStrict;
    pIemCpu->offOpcode = offOpcodeSaved;

    /* Temporarily advance rSP by the pop size per Intel semantics, redo the
       EA calculation, then restore rSP (the actual pop commits it later). */
    PCPUMCTX        pCtx     = pIemCpu->CTX_SUFF(pCtx);
    uint64_t const  RspSaved = pCtx->rsp;
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT: iemRegAddToRsp(pIemCpu, pCtx, 2); break;
        case IEMMODE_32BIT: iemRegAddToRsp(pIemCpu, pCtx, 4); break;
        case IEMMODE_64BIT: iemRegAddToRsp(pIemCpu, pCtx, 8); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    rcStrict = iemOpHlpCalcRmEffAddr(pIemCpu, bRm, 0, &GCPtrEff);
    Assert(rcStrict == VINF_SUCCESS);
    pCtx->rsp = RspSaved;

    /* Perform the operation - this should be CImpl. */
    /* Pop into a temp and store it at the effective address; rSP is only
       committed if both the stack read and the memory write succeed. */
    RTUINT64U TmpRsp;
    TmpRsp.u = pCtx->rsp;
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Value;
            rcStrict = iemMemStackPopU16Ex(pIemCpu, &u16Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU16(pIemCpu, pIemCpu->iEffSeg, GCPtrEff, u16Value);
            break;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Value;
            rcStrict = iemMemStackPopU32Ex(pIemCpu, &u32Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU32(pIemCpu, pIemCpu->iEffSeg, GCPtrEff, u32Value);
            break;
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Value;
            rcStrict = iemMemStackPopU64Ex(pIemCpu, &u64Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU64(pIemCpu, pIemCpu->iEffSeg, GCPtrEff, u64Value);
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict == VINF_SUCCESS)
    {
        pCtx->rsp = TmpRsp.u;
        iemRegUpdateRipAndClearRF(pIemCpu);
    }
    return rcStrict;

#else
    return VERR_IEM_IPE_2;
#endif
}
10405
10406
10407/** Opcode 0x8f. */
10408FNIEMOP_DEF(iemOp_Grp1A)
10409{
10410 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10411 if ((bRm & X86_MODRM_REG_MASK) == (0 << X86_MODRM_REG_SHIFT)) /* /0 */
10412 return FNIEMOP_CALL_1(iemOp_pop_Ev, bRm);
10413
10414 /* AMD has defined /1 thru /7 as XOP prefix (similar to three byte VEX). */
10415 /** @todo XOP decoding. */
10416 IEMOP_MNEMONIC("3-byte-xop");
10417 return IEMOP_RAISE_INVALID_OPCODE();
10418}
10419
10420
/**
 * Common 'xchg reg,rAX' helper for opcodes 0x90-0x97.
 *
 * @param   iReg    The low 3 bits of the register to exchange with rAX;
 *                  REX.B is OR'ed in below.
 */
FNIEMOP_DEF_1(iemOpCommonXchgGRegRax, uint8_t, iReg)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    iReg |= pIemCpu->uRexB;
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint16_t, u16Tmp1);
            IEM_MC_LOCAL(uint16_t, u16Tmp2);
            IEM_MC_FETCH_GREG_U16(u16Tmp1, iReg);
            IEM_MC_FETCH_GREG_U16(u16Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp1);
            IEM_MC_STORE_GREG_U16(iReg,         u16Tmp2);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Tmp1);
            IEM_MC_LOCAL(uint32_t, u32Tmp2);
            IEM_MC_FETCH_GREG_U32(u32Tmp1, iReg);
            IEM_MC_FETCH_GREG_U32(u32Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp1);
            IEM_MC_STORE_GREG_U32(iReg,         u32Tmp2);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Tmp1);
            IEM_MC_LOCAL(uint64_t, u64Tmp2);
            IEM_MC_FETCH_GREG_U64(u64Tmp1, iReg);
            IEM_MC_FETCH_GREG_U64(u64Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp1);
            IEM_MC_STORE_GREG_U64(iReg,         u64Tmp2);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10470
10471
10472/** Opcode 0x90. */
10473FNIEMOP_DEF(iemOp_nop)
10474{
10475 /* R8/R8D and RAX/EAX can be exchanged. */
10476 if (pIemCpu->fPrefixes & IEM_OP_PRF_REX_B)
10477 {
10478 IEMOP_MNEMONIC("xchg r8,rAX");
10479 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xAX);
10480 }
10481
10482 if (pIemCpu->fPrefixes & IEM_OP_PRF_LOCK)
10483 IEMOP_MNEMONIC("pause");
10484 else
10485 IEMOP_MNEMONIC("nop");
10486 IEM_MC_BEGIN(0, 0);
10487 IEM_MC_ADVANCE_RIP();
10488 IEM_MC_END();
10489 return VINF_SUCCESS;
10490}
10491
10492
/** Opcode 0x91. - XCHG rCX/r9,rAX (register selected via common helper). */
FNIEMOP_DEF(iemOp_xchg_eCX_eAX)
{
    IEMOP_MNEMONIC("xchg rCX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xCX);
}
10499
10500
/** Opcode 0x92. - XCHG rDX/r10,rAX. */
FNIEMOP_DEF(iemOp_xchg_eDX_eAX)
{
    IEMOP_MNEMONIC("xchg rDX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDX);
}
10507
10508
/** Opcode 0x93. - XCHG rBX/r11,rAX. */
FNIEMOP_DEF(iemOp_xchg_eBX_eAX)
{
    IEMOP_MNEMONIC("xchg rBX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBX);
}
10515
10516
10517/** Opcode 0x94. */
10518FNIEMOP_DEF(iemOp_xchg_eSP_eAX)
10519{
10520 IEMOP_MNEMONIC("xchg rSX,rAX");
10521 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSP);
10522}
10523
10524
/** Opcode 0x95. - XCHG rBP/r13,rAX. */
FNIEMOP_DEF(iemOp_xchg_eBP_eAX)
{
    IEMOP_MNEMONIC("xchg rBP,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBP);
}
10531
10532
/** Opcode 0x96. - XCHG rSI/r14,rAX. */
FNIEMOP_DEF(iemOp_xchg_eSI_eAX)
{
    IEMOP_MNEMONIC("xchg rSI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSI);
}
10539
10540
/** Opcode 0x97. - XCHG rDI/r15,rAX. */
FNIEMOP_DEF(iemOp_xchg_eDI_eAX)
{
    IEMOP_MNEMONIC("xchg rDI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDI);
}
10547
10548
/** Opcode 0x98. - CBW/CWDE/CDQE: sign-extend the lower half of rAX into the
 *  upper half, per the effective operand size. */
FNIEMOP_DEF(iemOp_cbw)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            /* CBW: AL -> AX.  Sign bit of AL decides the fill value for AH. */
            IEMOP_MNEMONIC("cbw");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 7) {
                IEM_MC_OR_GREG_U16(X86_GREG_xAX, UINT16_C(0xff00));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U16(X86_GREG_xAX, UINT16_C(0x00ff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            /* CWDE: AX -> EAX. */
            IEMOP_MNEMONIC("cwde");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
                IEM_MC_OR_GREG_U32(X86_GREG_xAX, UINT32_C(0xffff0000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U32(X86_GREG_xAX, UINT32_C(0x0000ffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            /* CDQE: EAX -> RAX. */
            IEMOP_MNEMONIC("cdqe");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
                IEM_MC_OR_GREG_U64(X86_GREG_xAX, UINT64_C(0xffffffff00000000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U64(X86_GREG_xAX, UINT64_C(0x00000000ffffffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10594
10595
/** Opcode 0x99. - CWD/CDQ/CQO: fill rDX with the sign bit of rAX, per the
 *  effective operand size. */
FNIEMOP_DEF(iemOp_cwd)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            /* CWD: DX = sign of AX. */
            IEMOP_MNEMONIC("cwd");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, UINT16_C(0xffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            /* CDQ: EDX = sign of EAX. */
            IEMOP_MNEMONIC("cdq");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, UINT32_C(0xffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            /* CQO: RDX = sign of RAX. */
            IEMOP_MNEMONIC("cqo");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 63) {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, UINT64_C(0xffffffffffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10641
10642
/** Opcode 0x9a. - CALL Ap: direct far call with an immediate seg:off pointer.
 *  Invalid in 64-bit mode (IEMOP_HLP_NO_64BIT). */
FNIEMOP_DEF(iemOp_call_Ap)
{
    IEMOP_MNEMONIC("call Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    uint32_t offSeg;
    if (pIemCpu->enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&offSeg);
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
    uint16_t uSel;  IEM_OPCODE_GET_NEXT_U16(&uSel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_callf, uSel, offSeg, pIemCpu->enmEffOpSize);
}
10659
10660
/** Opcode 0x9b. (aka fwait) - WAIT: only checks for pending FPU exceptions /
 *  device-not-available conditions; otherwise a no-op. */
FNIEMOP_DEF(iemOp_wait)
{
    IEMOP_MNEMONIC("wait");
    IEMOP_HLP_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10674
10675
10676/** Opcode 0x9c. */
10677FNIEMOP_DEF(iemOp_pushf_Fv)
10678{
10679 IEMOP_HLP_NO_LOCK_PREFIX();
10680 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10681 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_pushf, pIemCpu->enmEffOpSize);
10682}
10683
10684
10685/** Opcode 0x9d. */
10686FNIEMOP_DEF(iemOp_popf_Fv)
10687{
10688 IEMOP_HLP_NO_LOCK_PREFIX();
10689 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10690 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_popf, pIemCpu->enmEffOpSize);
10691}
10692
10693
/** Opcode 0x9e. - SAHF: load SF/ZF/AF/PF/CF from AH.  In 64-bit mode only
 *  valid when CPUID reports LAHF/SAHF support. */
FNIEMOP_DEF(iemOp_sahf)
{
    IEMOP_MNEMONIC("sahf");
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (   pIemCpu->enmCpuMode == IEMMODE_64BIT
        && !IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fLahfSahf)
        return IEMOP_RAISE_INVALID_OPCODE();
    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint32_t, u32Flags);
    IEM_MC_LOCAL(uint32_t, EFlags);
    IEM_MC_FETCH_EFLAGS(EFlags);
    /* AH is GREG index 4 (xSP slot) in the legacy 8-bit register numbering. */
    IEM_MC_FETCH_GREG_U8_ZX_U32(u32Flags, X86_GREG_xSP/*=AH*/);
    /* Keep only the five flags SAHF may modify, force bit 1 (X86_EFL_1),
       and merge into the untouched upper EFLAGS bits. */
    IEM_MC_AND_LOCAL_U32(u32Flags, X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    IEM_MC_AND_LOCAL_U32(EFlags, UINT32_C(0xffffff00));
    IEM_MC_OR_LOCAL_U32(u32Flags, X86_EFL_1);
    IEM_MC_OR_2LOCS_U32(EFlags, u32Flags);
    IEM_MC_COMMIT_EFLAGS(EFlags);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10716
10717
/** Opcode 0x9f. - LAHF: copy the low EFLAGS byte into AH.  In 64-bit mode
 *  only valid when CPUID reports LAHF/SAHF support. */
FNIEMOP_DEF(iemOp_lahf)
{
    IEMOP_MNEMONIC("lahf");
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (   pIemCpu->enmCpuMode == IEMMODE_64BIT
        && !IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fLahfSahf)
        return IEMOP_RAISE_INVALID_OPCODE();
    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(uint8_t, u8Flags);
    IEM_MC_FETCH_EFLAGS_U8(u8Flags);
    /* AH is GREG index 4 (xSP slot) in the legacy 8-bit register numbering. */
    IEM_MC_STORE_GREG_U8(X86_GREG_xSP/*=AH*/, u8Flags);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10734
10735
/**
 * Macro used by iemOp_mov_Al_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL and
 * iemOp_mov_Ov_rAX to fetch the moffsXX bit of the opcode and fend of lock
 * prefixes.  Will return on failures.
 *
 * The moffs immediate is read with the current effective address size
 * (16/32/64 bits) and zero-extended to 64 bits.  The lock-prefix check is
 * done after the fetch so all opcode bytes are consumed first.
 *
 * @param a_GCPtrMemOff The variable to store the offset in.
 */
#define IEMOP_FETCH_MOFFS_XX(a_GCPtrMemOff) \
    do \
    { \
        switch (pIemCpu->enmEffAddrMode) \
        { \
            case IEMMODE_16BIT: \
                IEM_OPCODE_GET_NEXT_U16_ZX_U64(&(a_GCPtrMemOff)); \
                break; \
            case IEMMODE_32BIT: \
                IEM_OPCODE_GET_NEXT_U32_ZX_U64(&(a_GCPtrMemOff)); \
                break; \
            case IEMMODE_64BIT: \
                IEM_OPCODE_GET_NEXT_U64(&(a_GCPtrMemOff)); \
                break; \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
        IEMOP_HLP_NO_LOCK_PREFIX(); \
    } while (0)
10760
10761/** Opcode 0xa0. */
10762FNIEMOP_DEF(iemOp_mov_Al_Ob)
10763{
10764 /*
10765 * Get the offset and fend of lock prefixes.
10766 */
10767 RTGCPTR GCPtrMemOff;
10768 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
10769
10770 /*
10771 * Fetch AL.
10772 */
10773 IEM_MC_BEGIN(0,1);
10774 IEM_MC_LOCAL(uint8_t, u8Tmp);
10775 IEM_MC_FETCH_MEM_U8(u8Tmp, pIemCpu->iEffSeg, GCPtrMemOff);
10776 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
10777 IEM_MC_ADVANCE_RIP();
10778 IEM_MC_END();
10779 return VINF_SUCCESS;
10780}
10781
10782
/** Opcode 0xa1. - MOV rAX,Ov: load rAX from an absolute moffs address, per
 *  the effective operand size. */
FNIEMOP_DEF(iemOp_mov_rAX_Ov)
{
    /*
     * Get the offset and fend of lock prefixes.
     */
    IEMOP_MNEMONIC("mov rAX,Ov");
    RTGCPTR GCPtrMemOff;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);

    /*
     * Fetch rAX.
     */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint16_t, u16Tmp);
            IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10828
10829
10830/** Opcode 0xa2. */
10831FNIEMOP_DEF(iemOp_mov_Ob_AL)
10832{
10833 /*
10834 * Get the offset and fend of lock prefixes.
10835 */
10836 RTGCPTR GCPtrMemOff;
10837 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
10838
10839 /*
10840 * Store AL.
10841 */
10842 IEM_MC_BEGIN(0,1);
10843 IEM_MC_LOCAL(uint8_t, u8Tmp);
10844 IEM_MC_FETCH_GREG_U8(u8Tmp, X86_GREG_xAX);
10845 IEM_MC_STORE_MEM_U8(pIemCpu->iEffSeg, GCPtrMemOff, u8Tmp);
10846 IEM_MC_ADVANCE_RIP();
10847 IEM_MC_END();
10848 return VINF_SUCCESS;
10849}
10850
10851
10852/** Opcode 0xa3. */
10853FNIEMOP_DEF(iemOp_mov_Ov_rAX)
10854{
10855 /*
10856 * Get the offset and fend of lock prefixes.
10857 */
10858 RTGCPTR GCPtrMemOff;
10859 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
10860
10861 /*
10862 * Store rAX.
10863 */
10864 switch (pIemCpu->enmEffOpSize)
10865 {
10866 case IEMMODE_16BIT:
10867 IEM_MC_BEGIN(0,1);
10868 IEM_MC_LOCAL(uint16_t, u16Tmp);
10869 IEM_MC_FETCH_GREG_U16(u16Tmp, X86_GREG_xAX);
10870 IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrMemOff, u16Tmp);
10871 IEM_MC_ADVANCE_RIP();
10872 IEM_MC_END();
10873 return VINF_SUCCESS;
10874
10875 case IEMMODE_32BIT:
10876 IEM_MC_BEGIN(0,1);
10877 IEM_MC_LOCAL(uint32_t, u32Tmp);
10878 IEM_MC_FETCH_GREG_U32(u32Tmp, X86_GREG_xAX);
10879 IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrMemOff, u32Tmp);
10880 IEM_MC_ADVANCE_RIP();
10881 IEM_MC_END();
10882 return VINF_SUCCESS;
10883
10884 case IEMMODE_64BIT:
10885 IEM_MC_BEGIN(0,1);
10886 IEM_MC_LOCAL(uint64_t, u64Tmp);
10887 IEM_MC_FETCH_GREG_U64(u64Tmp, X86_GREG_xAX);
10888 IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrMemOff, u64Tmp);
10889 IEM_MC_ADVANCE_RIP();
10890 IEM_MC_END();
10891 return VINF_SUCCESS;
10892
10893 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10894 }
10895}
10896
/** Macro used by iemOp_movsb_Xb_Yb and iemOp_movswd_Xv_Yv.
 * Emits the non-REP MOVS microcode: read ValBits bits from [iEffSeg:xSI],
 * write them to [ES:xDI], then step both index registers by ValBits/8 -
 * down when EFLAGS.DF is set, up otherwise.  AddrBits selects the address
 * size used when reading/updating xSI and xDI. */
#define IEM_MOVS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR,           uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, pIemCpu->iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END();
10915
/** Opcode 0xa4. - MOVSB: single step via IEM_MOVS_CASE, REP variants via the
 *  C implementations. */
FNIEMOP_DEF(iemOp_movsb_Xb_Yb)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep movsb Xb,Yb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr16, pIemCpu->iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr32, pIemCpu->iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr64, pIemCpu->iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("movsb Xb,Yb");

    /*
     * Sharing case implementation with movs[wdq] below.
     */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_MOVS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_MOVS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_MOVS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
10949
10950
10951/** Opcode 0xa5. */
10952FNIEMOP_DEF(iemOp_movswd_Xv_Yv)
10953{
10954 IEMOP_HLP_NO_LOCK_PREFIX();
10955
10956 /*
10957 * Use the C implementation if a repeat prefix is encountered.
10958 */
10959 if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
10960 {
10961 IEMOP_MNEMONIC("rep movs Xv,Yv");
10962 switch (pIemCpu->enmEffOpSize)
10963 {
10964 case IEMMODE_16BIT:
10965 switch (pIemCpu->enmEffAddrMode)
10966 {
10967 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr16, pIemCpu->iEffSeg);
10968 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr32, pIemCpu->iEffSeg);
10969 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr64, pIemCpu->iEffSeg);
10970 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10971 }
10972 break;
10973 case IEMMODE_32BIT:
10974 switch (pIemCpu->enmEffAddrMode)
10975 {
10976 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr16, pIemCpu->iEffSeg);
10977 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr32, pIemCpu->iEffSeg);
10978 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr64, pIemCpu->iEffSeg);
10979 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10980 }
10981 case IEMMODE_64BIT:
10982 switch (pIemCpu->enmEffAddrMode)
10983 {
10984 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6);
10985 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr32, pIemCpu->iEffSeg);
10986 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr64, pIemCpu->iEffSeg);
10987 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10988 }
10989 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10990 }
10991 }
10992 IEMOP_MNEMONIC("movs Xv,Yv");
10993
10994 /*
10995 * Annoying double switch here.
10996 * Using ugly macro for implementing the cases, sharing it with movsb.
10997 */
10998 switch (pIemCpu->enmEffOpSize)
10999 {
11000 case IEMMODE_16BIT:
11001 switch (pIemCpu->enmEffAddrMode)
11002 {
11003 case IEMMODE_16BIT: IEM_MOVS_CASE(16, 16); break;
11004 case IEMMODE_32BIT: IEM_MOVS_CASE(16, 32); break;
11005 case IEMMODE_64BIT: IEM_MOVS_CASE(16, 64); break;
11006 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11007 }
11008 break;
11009
11010 case IEMMODE_32BIT:
11011 switch (pIemCpu->enmEffAddrMode)
11012 {
11013 case IEMMODE_16BIT: IEM_MOVS_CASE(32, 16); break;
11014 case IEMMODE_32BIT: IEM_MOVS_CASE(32, 32); break;
11015 case IEMMODE_64BIT: IEM_MOVS_CASE(32, 64); break;
11016 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11017 }
11018 break;
11019
11020 case IEMMODE_64BIT:
11021 switch (pIemCpu->enmEffAddrMode)
11022 {
11023 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
11024 case IEMMODE_32BIT: IEM_MOVS_CASE(64, 32); break;
11025 case IEMMODE_64BIT: IEM_MOVS_CASE(64, 64); break;
11026 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11027 }
11028 break;
11029 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11030 }
11031 return VINF_SUCCESS;
11032}
11033
11034#undef IEM_MOVS_CASE
11035
/** Macro used by iemOp_cmpsb_Xb_Yb and iemOp_cmpswd_Xv_Yv.
 * Emits the non-REP CMPS microcode: read ValBits bits from [iEffSeg:xSI] and
 * [ES:xDI], compare them via iemAImpl_cmp (updating EFLAGS, discarding the
 * subtraction result), then step both index registers by ValBits/8 per
 * EFLAGS.DF.  AddrBits selects the address size used for xSI/xDI. */
#define IEM_CMPS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(3, 3); \
        IEM_MC_ARG(uint##ValBits##_t *, puValue1, 0); \
        IEM_MC_ARG(uint##ValBits##_t,   uValue2,  1); \
        IEM_MC_ARG(uint32_t *,          pEFlags,  2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue1); \
        IEM_MC_LOCAL(RTGCPTR,           uAddr); \
        \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue1, pIemCpu->iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue2, X86_SREG_ES, uAddr); \
        IEM_MC_REF_LOCAL(puValue1, uValue1); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puValue1, uValue2, pEFlags); \
        \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END(); \

11063/** Opcode 0xa6. */
11064FNIEMOP_DEF(iemOp_cmpsb_Xb_Yb)
11065{
11066 IEMOP_HLP_NO_LOCK_PREFIX();
11067
11068 /*
11069 * Use the C implementation if a repeat prefix is encountered.
11070 */
11071 if (pIemCpu->fPrefixes & IEM_OP_PRF_REPZ)
11072 {
11073 IEMOP_MNEMONIC("repe cmps Xb,Yb");
11074 switch (pIemCpu->enmEffAddrMode)
11075 {
11076 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr16, pIemCpu->iEffSeg);
11077 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr32, pIemCpu->iEffSeg);
11078 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr64, pIemCpu->iEffSeg);
11079 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11080 }
11081 }
11082 if (pIemCpu->fPrefixes & IEM_OP_PRF_REPNZ)
11083 {
11084 IEMOP_MNEMONIC("repe cmps Xb,Yb");
11085 switch (pIemCpu->enmEffAddrMode)
11086 {
11087 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr16, pIemCpu->iEffSeg);
11088 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr32, pIemCpu->iEffSeg);
11089 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr64, pIemCpu->iEffSeg);
11090 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11091 }
11092 }
11093 IEMOP_MNEMONIC("cmps Xb,Yb");
11094
11095 /*
11096 * Sharing case implementation with cmps[wdq] below.
11097 */
11098 switch (pIemCpu->enmEffAddrMode)
11099 {
11100 case IEMMODE_16BIT: IEM_CMPS_CASE(8, 16); break;
11101 case IEMMODE_32BIT: IEM_CMPS_CASE(8, 32); break;
11102 case IEMMODE_64BIT: IEM_CMPS_CASE(8, 64); break;
11103 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11104 }
11105 return VINF_SUCCESS;
11106
11107}
11108
11109
/**
 * Opcode 0xa7 - CMPSW/CMPSD/CMPSQ.
 *
 * Operand-size dependent CMPS; REPE/REPNE forms are deferred to the C
 * implementations, the plain form uses the shared IEM_CMPS_CASE microcode.
 */
FNIEMOP_DEF(iemOp_cmpswd_Xv_Yv)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC("repe cmps Xv,Yv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no break: unreachable, every path above returns */
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_4); /* 16-bit addressing cannot be encoded in 64-bit mode */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    if (pIemCpu->fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC("repne cmps Xv,Yv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no break: unreachable, every path above returns */
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_2); /* 16-bit addressing cannot be encoded in 64-bit mode */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    IEMOP_MNEMONIC("cmps Xv,Yv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with cmpsb.
     */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_CMPS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_CMPS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;

}
11228
11229#undef IEM_CMPS_CASE
11230
/**
 * Opcode 0xa8 - TEST AL, imm8.
 * AF is architecturally undefined after TEST, hence the verification mask.
 */
FNIEMOP_DEF(iemOp_test_AL_Ib)
{
    IEMOP_MNEMONIC("test al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_test);
}
11238
11239
/**
 * Opcode 0xa9 - TEST rAX, imm16/32 (sign-extended to 64-bit with REX.W).
 * AF is architecturally undefined after TEST, hence the verification mask.
 */
FNIEMOP_DEF(iemOp_test_eAX_Iz)
{
    IEMOP_MNEMONIC("test rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_test);
}
11247
11248
/**
 * Macro used by iemOp_stosb_Yb_AL and iemOp_stoswd_Yv_eAX to emit the
 * microcode for a single (non-repeated) STOS iteration.
 *
 * Stores the low ValBits bits of xAX at [ES:xDI] (address zero-extended
 * from an AddrBits-wide xDI) and then steps xDI by ValBits/8 in the
 * direction given by EFLAGS.DF.
 */
#define IEM_STOS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(0, 2); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    IEM_MC_FETCH_GREG_U##ValBits(uValue, X86_GREG_xAX); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP(); \
    IEM_MC_END(); \

/**
 * Opcode 0xaa - STOSB.
 *
 * Stores AL at [ES:xDI].  Both REP and REPNE select the repeated C
 * implementation (the F2 prefix has no distinct meaning for STOS).
 */
FNIEMOP_DEF(iemOp_stosb_Yb_AL)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep stos Yb,al");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m16);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m32);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("stos Yb,al");

    /*
     * Sharing case implementation with stos[wdq] below.
     */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_STOS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_STOS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_STOS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
11298
11299
/**
 * Opcode 0xab - STOSW/STOSD/STOSQ.
 *
 * Stores rAX (per effective operand size) at [ES:xDI]; REP forms are
 * deferred to the C implementations.
 */
FNIEMOP_DEF(iemOp_stoswd_Yv_eAX)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep stos Yv,rAX");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no break: unreachable, every path above returns */
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_9); /* 16-bit addressing cannot be encoded in 64-bit mode */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("stos Yv,rAX");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with stosb.
     */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_STOS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_STOS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_STOS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_STOS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_STOS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_STOS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_STOS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_STOS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
11382
11383#undef IEM_STOS_CASE
11384
/**
 * Macro used by iemOp_lodsb_AL_Xb and iemOp_lodswd_eAX_Xv to emit the
 * microcode for a single (non-repeated) LODS iteration.
 *
 * Loads ValBits bits from [iEffSeg:xSI] into the low part of xAX and
 * then steps xSI by ValBits/8 in the direction given by EFLAGS.DF.
 */
#define IEM_LODS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(0, 2); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, pIemCpu->iEffSeg, uAddr); \
    IEM_MC_STORE_GREG_U##ValBits(X86_GREG_xAX, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP(); \
    IEM_MC_END();
11400
/**
 * Opcode 0xac - LODSB.
 *
 * Loads the byte at [iEffSeg:xSI] into AL.  Both REP and REPNE select
 * the repeated C implementation.
 */
FNIEMOP_DEF(iemOp_lodsb_AL_Xb)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep lodsb al,Xb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m16, pIemCpu->iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m32, pIemCpu->iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m64, pIemCpu->iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("lodsb al,Xb");

    /*
     * Sharing case implementation with lods[wdq] below.
     */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_LODS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_LODS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_LODS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
11434
11435
/**
 * Opcode 0xad - LODSW/LODSD/LODSQ.
 *
 * Loads rAX (per effective operand size) from [iEffSeg:xSI]; REP forms
 * are deferred to the C implementations.
 */
FNIEMOP_DEF(iemOp_lodswd_eAX_Xv)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep lods rAX,Xv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no break: unreachable, every path above returns */
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_7); /* 16-bit addressing cannot be encoded in 64-bit mode */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("lods rAX,Xv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with lodsb.
     */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_LODS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_LODS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_LODS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_LODS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_LODS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
11518
11519#undef IEM_LODS_CASE
11520
/**
 * Macro used by iemOp_scasb_AL_Xb and iemOp_scaswd_eAX_Xv to emit the
 * microcode for a single (non-repeated) SCAS iteration.
 *
 * Compares the low ValBits bits of xAX against the value at [ES:xDI]
 * via the shared CMP worker (EFLAGS only; xAX is referenced but CMP does
 * not write it back), then steps xDI by ValBits/8 per EFLAGS.DF.
 */
#define IEM_SCAS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(3, 2); \
    IEM_MC_ARG(uint##ValBits##_t *, puRax, 0); \
    IEM_MC_ARG(uint##ValBits##_t, uValue, 1); \
    IEM_MC_ARG(uint32_t *, pEFlags, 2); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, X86_SREG_ES, uAddr); \
    IEM_MC_REF_GREG_U##ValBits(puRax, X86_GREG_xAX); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puRax, uValue, pEFlags); \
    \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP(); \
    IEM_MC_END();
11542
/**
 * Opcode 0xae - SCASB.
 *
 * Compares AL with the byte at [ES:xDI]; REPE/REPNE forms are deferred
 * to the C implementations.
 */
FNIEMOP_DEF(iemOp_scasb_AL_Xb)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC("repe scasb al,Xb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m16);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m32);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC("repne scasb al,Xb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m16);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m32);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("scasb al,Xb");

    /*
     * Sharing case implementation with scas[wdq] below.
     */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_SCAS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_SCAS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_SCAS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
11587
11588
/**
 * Opcode 0xaf - SCASW/SCASD/SCASQ.
 *
 * Compares rAX (per effective operand size) with the value at [ES:xDI];
 * REPE/REPNE forms are deferred to the C implementations.
 */
FNIEMOP_DEF(iemOp_scaswd_eAX_Xv)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC("repe scas rAX,Xv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no break: unreachable, every path above returns */
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /** @todo It's this wrong, we can do 16-bit addressing in 64-bit mode, but not 32-bit. right? */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC("repne scas rAX,Xv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no break: unreachable, every path above returns */
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_5); /* 16-bit addressing cannot be encoded in 64-bit mode */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("scas rAX,Xv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with scasb.
     */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_SCAS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_SCAS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_SCAS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_SCAS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_SCAS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_SCAS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_SCAS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_SCAS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
11704
11705#undef IEM_SCAS_CASE
11706
/**
 * Common 'mov r8, imm8' helper.
 *
 * Fetches the imm8 operand and stores it into the 8-bit register given
 * by @a iReg (the REX.B extension, if any, is already folded into iReg
 * by the callers; high-byte vs. REX low-byte selection for indexes 4-7
 * is resolved by the IEM_MC_STORE_GREG_U8 machinery elsewhere).
 */
FNIEMOP_DEF_1(iemOpCommonMov_r8_Ib, uint8_t, iReg)
{
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL_CONST(uint8_t, u8Value,/*=*/ u8Imm);
    IEM_MC_STORE_GREG_U8(iReg, u8Value);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
11723
11724
/** Opcode 0xb0 - mov AL,Ib (R8B with REX.B). */
FNIEMOP_DEF(iemOp_mov_AL_Ib)
{
    IEMOP_MNEMONIC("mov AL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xAX | pIemCpu->uRexB);
}
11731
11732
/** Opcode 0xb1 - mov CL,Ib (R9B with REX.B).
 *  NOTE(review): name lacks the mov_ prefix its siblings use; kept as-is
 *  since the opcode table references it. */
FNIEMOP_DEF(iemOp_CL_Ib)
{
    IEMOP_MNEMONIC("mov CL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xCX | pIemCpu->uRexB);
}
11739
11740
/** Opcode 0xb2 - mov DL,Ib (R10B with REX.B). */
FNIEMOP_DEF(iemOp_DL_Ib)
{
    IEMOP_MNEMONIC("mov DL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDX | pIemCpu->uRexB);
}
11747
11748
/** Opcode 0xb3 - mov BL,Ib (R11B with REX.B). */
FNIEMOP_DEF(iemOp_BL_Ib)
{
    IEMOP_MNEMONIC("mov BL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBX | pIemCpu->uRexB);
}
11755
11756
/** Opcode 0xb4 - mov AH,Ib (SPL/R12B when a REX prefix is present;
 *  register index 4 - high-byte resolution happens in the store helper). */
FNIEMOP_DEF(iemOp_mov_AH_Ib)
{
    IEMOP_MNEMONIC("mov AH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP | pIemCpu->uRexB);
}
11763
11764
/** Opcode 0xb5 - mov CH,Ib (BPL/R13B when a REX prefix is present). */
FNIEMOP_DEF(iemOp_CH_Ib)
{
    IEMOP_MNEMONIC("mov CH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBP | pIemCpu->uRexB);
}
11771
11772
/** Opcode 0xb6 - mov DH,Ib (SIL/R14B when a REX prefix is present). */
FNIEMOP_DEF(iemOp_DH_Ib)
{
    IEMOP_MNEMONIC("mov DH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSI | pIemCpu->uRexB);
}
11779
11780
/** Opcode 0xb7 - mov BH,Ib (DIL/R15B when a REX prefix is present). */
FNIEMOP_DEF(iemOp_BH_Ib)
{
    IEMOP_MNEMONIC("mov BH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDI | pIemCpu->uRexB);
}
11787
11788
11789/**
11790 * Common 'mov regX,immX' helper.
11791 */
11792FNIEMOP_DEF_1(iemOpCommonMov_Rv_Iv, uint8_t, iReg)
11793{
11794 switch (pIemCpu->enmEffOpSize)
11795 {
11796 case IEMMODE_16BIT:
11797 {
11798 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
11799 IEMOP_HLP_NO_LOCK_PREFIX();
11800
11801 IEM_MC_BEGIN(0, 1);
11802 IEM_MC_LOCAL_CONST(uint16_t, u16Value,/*=*/ u16Imm);
11803 IEM_MC_STORE_GREG_U16(iReg, u16Value);
11804 IEM_MC_ADVANCE_RIP();
11805 IEM_MC_END();
11806 break;
11807 }
11808
11809 case IEMMODE_32BIT:
11810 {
11811 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
11812 IEMOP_HLP_NO_LOCK_PREFIX();
11813
11814 IEM_MC_BEGIN(0, 1);
11815 IEM_MC_LOCAL_CONST(uint32_t, u32Value,/*=*/ u32Imm);
11816 IEM_MC_STORE_GREG_U32(iReg, u32Value);
11817 IEM_MC_ADVANCE_RIP();
11818 IEM_MC_END();
11819 break;
11820 }
11821 case IEMMODE_64BIT:
11822 {
11823 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm); /* 64-bit immediate! */
11824 IEMOP_HLP_NO_LOCK_PREFIX();
11825
11826 IEM_MC_BEGIN(0, 1);
11827 IEM_MC_LOCAL_CONST(uint64_t, u64Value,/*=*/ u64Imm);
11828 IEM_MC_STORE_GREG_U64(iReg, u64Value);
11829 IEM_MC_ADVANCE_RIP();
11830 IEM_MC_END();
11831 break;
11832 }
11833 }
11834
11835 return VINF_SUCCESS;
11836}
11837
11838
/** Opcode 0xb8 - mov rAX,Iv (R8 with REX.B).
 *  NOTE(review): mnemonic string says "IV"; siblings use lowercase v ("Ib"/"Iz"). */
FNIEMOP_DEF(iemOp_eAX_Iv)
{
    IEMOP_MNEMONIC("mov rAX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xAX | pIemCpu->uRexB);
}
11845
11846
/** Opcode 0xb9 - mov rCX,Iv (R9 with REX.B). */
FNIEMOP_DEF(iemOp_eCX_Iv)
{
    IEMOP_MNEMONIC("mov rCX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xCX | pIemCpu->uRexB);
}
11853
11854
/** Opcode 0xba - mov rDX,Iv (R10 with REX.B). */
FNIEMOP_DEF(iemOp_eDX_Iv)
{
    IEMOP_MNEMONIC("mov rDX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDX | pIemCpu->uRexB);
}
11861
11862
/** Opcode 0xbb - mov rBX,Iv (R11 with REX.B). */
FNIEMOP_DEF(iemOp_eBX_Iv)
{
    IEMOP_MNEMONIC("mov rBX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBX | pIemCpu->uRexB);
}
11869
11870
/** Opcode 0xbc - mov rSP,Iv (R12 with REX.B). */
FNIEMOP_DEF(iemOp_eSP_Iv)
{
    IEMOP_MNEMONIC("mov rSP,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSP | pIemCpu->uRexB);
}
11877
11878
/** Opcode 0xbd - mov rBP,Iv (R13 with REX.B). */
FNIEMOP_DEF(iemOp_eBP_Iv)
{
    IEMOP_MNEMONIC("mov rBP,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBP | pIemCpu->uRexB);
}
11885
11886
/** Opcode 0xbe - mov rSI,Iv (R14 with REX.B). */
FNIEMOP_DEF(iemOp_eSI_Iv)
{
    IEMOP_MNEMONIC("mov rSI,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSI | pIemCpu->uRexB);
}
11893
11894
/** Opcode 0xbf - mov rDI,Iv (R15 with REX.B). */
FNIEMOP_DEF(iemOp_eDI_Iv)
{
    IEMOP_MNEMONIC("mov rDI,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDI | pIemCpu->uRexB);
}
11901
11902
/**
 * Opcode 0xc0 - Group 2 shift/rotate Eb,Ib (186+).
 *
 * The ModRM reg field selects the operation (/6 is undefined and raises
 * \#UD); the shift count is an imm8 that follows the ModRM bytes.  OF and
 * AF are architecturally undefined for counts != 1, hence the verification
 * mask.
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_Ib)
{
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Eb,Ib"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Eb,Ib"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Eb,Ib"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Eb,Ib"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Eb,Ib"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Eb,Ib"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Eb,Ib"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEMOP_HLP_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, cShiftArg, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        /* cbImm = 1: tell the effective-address calc an imm8 still follows. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEM_MC_ASSIGN(cShiftArg, cShift);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
11962
11963
11964/** Opcode 0xc1. */
/**
 * Opcode 0xc1 - Group 2 shift/rotate with a word/dword/qword operand and an
 * immediate byte count: ROL/ROR/RCL/RCR/SHL/SHR/SAR Ev,Ib.
 *
 * The actual operation is selected by the reg field of the ModR/M byte;
 * /6 is undefined and raises \#UD.  Not available before the 80186.
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_Ib)
{
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Ev,Ib"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Ev,Ib"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Ev,Ib"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Ev,Ib"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Ev,Ib"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Ev,Ib"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Ev,Ib"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    /* OF and AF are left undefined by these instructions for some counts. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEMOP_HLP_NO_LOCK_PREFIX();
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit writes zero the upper half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,              pu16Dst,    0);
                IEM_MC_ARG(uint8_t,                 cShiftArg,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);

                /* Note: 1 = one immediate byte follows the ModR/M bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,              pu32Dst,    0);
                IEM_MC_ARG(uint8_t,                 cShiftArg,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,              pu64Dst,    0);
                IEM_MC_ARG(uint8_t,                 cShiftArg,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12101
12102
12103/** Opcode 0xc2. */
/**
 * Opcode 0xc2 - near return popping an immediate number of bytes:
 * RETN Iw.  Defers to the C implementation (iemCImpl_retn).
 */
FNIEMOP_DEF(iemOp_retn_Iw)
{
    IEMOP_MNEMONIC("retn Iw");
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* In 64-bit mode the default operand size for near returns is 64-bit. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pIemCpu->enmEffOpSize, u16Imm);
}
12112
12113
12114/** Opcode 0xc3. */
/**
 * Opcode 0xc3 - plain near return: RETN.
 * Same C implementation as 0xc2 but with a zero byte-pop count.
 */
FNIEMOP_DEF(iemOp_retn)
{
    IEMOP_MNEMONIC("retn");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pIemCpu->enmEffOpSize, 0);
}
12122
12123
12124/** Opcode 0xc4. */
/**
 * Opcode 0xc4 - LES Gv,Mp, doubling as the 2-byte VEX prefix.
 *
 * In 64-bit mode, or with MOD=3 in legacy/compat mode, this byte is the
 * VEX2 prefix (not implemented here yet, so it raises \#UD); otherwise it
 * is the legacy LES instruction.
 */
FNIEMOP_DEF(iemOp_les_Gv_Mp_vex2)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (   pIemCpu->enmCpuMode == IEMMODE_64BIT
        || (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC("2-byte-vex");
        /* The LES instruction is invalid 64-bit mode. In legacy and
           compatability mode it is invalid with MOD=3.
           The use as a VEX prefix is made possible by assigning the inverted
           REX.R to the top MOD bit, and the top bit in the inverted register
           specifier to the bottom MOD bit, thereby effectively limiting 32-bit
           to accessing registers 0..7 in this VEX form. */
        /** @todo VEX: Just use new tables for it. */
        return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_MNEMONIC("les Gv,Mp");
    /* Legacy LES: load ES and the destination general register from a far pointer. */
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_ES, bRm);
}
12144
12145
12146/** Opcode 0xc5. */
/**
 * Opcode 0xc5 - LDS Gv,Mp, doubling as the 3-byte VEX prefix.
 *
 * Legacy LDS applies outside 64-bit mode with a memory operand; every other
 * combination is the VEX3 prefix, which is not implemented yet and thus
 * raises \#UD after consuming the remaining VEX bytes.
 */
FNIEMOP_DEF(iemOp_lds_Gv_Mp_vex3)
{
    /* The LDS instruction is invalid 64-bit mode. In legacy and
       compatability mode it is invalid with MOD=3.
       The use as a VEX prefix is made possible by assigning the inverted
       REX.R and REX.X to the two MOD bits, since the REX bits are ignored
       outside of 64-bit mode. VEX is not available in real or v86 mode. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pIemCpu->enmCpuMode != IEMMODE_64BIT)
    {
        if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        {
            IEMOP_MNEMONIC("lds Gv,Mp");
            return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_DS, bRm);
        }
        /* MOD=3 means VEX, which requires protected mode. */
        IEMOP_HLP_NO_REAL_OR_V86_MODE();
    }

    IEMOP_MNEMONIC("3-byte-vex");
    /** @todo Test when exctly the VEX conformance checks kick in during
     * instruction decoding and fetching (using \#PF). */
    /* Consume the two VEX payload bytes and the real opcode byte. */
    uint8_t bVex1;   IEM_OPCODE_GET_NEXT_U8(&bVex1);
    uint8_t bVex2;   IEM_OPCODE_GET_NEXT_U8(&bVex2);
    uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
#if 0 /* will make sense of this next week... */
    if (   !(pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REX))
        &&
        )
    {

    }
#endif

    /** @todo VEX: Just use new tables for it. */
    return IEMOP_RAISE_INVALID_OPCODE();
}
12183
12184
12185/** Opcode 0xc6. */
/**
 * Opcode 0xc6 - Group 11: MOV Eb,Ib.
 *
 * Only the /0 encoding is defined in this group; all other reg-field values
 * raise \#UD.
 */
FNIEMOP_DEF(iemOp_Grp11_Eb_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
        return IEMOP_RAISE_INVALID_OPCODE();
    IEMOP_MNEMONIC("mov Eb,Ib");

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_BEGIN(0, 0);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u8Imm);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        /* 1 = one immediate byte trailing the ModR/M encoding. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_STORE_MEM_U8(pIemCpu->iEffSeg, GCPtrEffDst, u8Imm);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
12216
12217
12218/** Opcode 0xc7. */
/**
 * Opcode 0xc7 - Group 11: MOV Ev,Iz.
 *
 * Only the /0 encoding is defined in this group; all other reg-field values
 * raise \#UD.  In 64-bit mode the immediate is a sign-extended 32-bit value.
 */
FNIEMOP_DEF(iemOp_Grp11_Ev_Iz)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Ev,Iz in this group. */
        return IEMOP_RAISE_INVALID_OPCODE();
    IEMOP_MNEMONIC("mov Ev,Iz");

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 0);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 0);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 0);
                /* 64-bit form takes a 32-bit immediate sign-extended to 64 bits. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                /* The trailing argument is the immediate size in bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrEffDst, u32Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                /* Immediate is still 4 bytes in 64-bit mode (sign-extended). */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffDst, u64Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12298
12299
12300
12301
12302/** Opcode 0xc8. */
/**
 * Opcode 0xc8 - ENTER Iw,Ib: create a stack frame of cbFrame bytes with the
 * given nesting level.  186+ instruction; defers to iemCImpl_enter.
 */
FNIEMOP_DEF(iemOp_enter_Iw_Ib)
{
    IEMOP_MNEMONIC("enter Iw,Ib");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEMOP_HLP_NO_LOCK_PREFIX();
    uint16_t cbFrame;        IEM_OPCODE_GET_NEXT_U16(&cbFrame);
    uint8_t  u8NestingLevel; IEM_OPCODE_GET_NEXT_U8(&u8NestingLevel);
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_enter, pIemCpu->enmEffOpSize, cbFrame, u8NestingLevel);
}
12313
12314
12315/** Opcode 0xc9. */
12316FNIEMOP_DEF(iemOp_leave)
12317{
12318 IEMOP_MNEMONIC("retn");
12319 IEMOP_HLP_MIN_186();
12320 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
12321 IEMOP_HLP_NO_LOCK_PREFIX();
12322 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_leave, pIemCpu->enmEffOpSize);
12323}
12324
12325
12326/** Opcode 0xca. */
/**
 * Opcode 0xca - far return popping an immediate number of bytes:
 * RETF Iw.  Defers to the C implementation (iemCImpl_retf).
 */
FNIEMOP_DEF(iemOp_retf_Iw)
{
    IEMOP_MNEMONIC("retf Iw");
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pIemCpu->enmEffOpSize, u16Imm);
}
12335
12336
12337/** Opcode 0xcb. */
/**
 * Opcode 0xcb - plain far return: RETF.
 * Same C implementation as 0xca but with a zero byte-pop count.
 */
FNIEMOP_DEF(iemOp_retf)
{
    IEMOP_MNEMONIC("retf");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pIemCpu->enmEffOpSize, 0);
}
12345
12346
12347/** Opcode 0xcc. */
/**
 * Opcode 0xcc - INT3 breakpoint: raise \#BP, flagged as coming from the
 * dedicated one-byte breakpoint instruction.
 */
FNIEMOP_DEF(iemOp_int_3)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_BP, true /*fIsBpInstr*/);
}
12353
12354
12355/** Opcode 0xcd. */
/**
 * Opcode 0xcd - INT Ib: raise software interrupt with the vector given by
 * the immediate byte (not the special INT3 form).
 */
FNIEMOP_DEF(iemOp_int_Ib)
{
    uint8_t u8Int; IEM_OPCODE_GET_NEXT_U8(&u8Int);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, u8Int, false /*fIsBpInstr*/);
}
12362
12363
12364/** Opcode 0xce. */
/**
 * Opcode 0xce - INTO: raise \#OF if the overflow flag is set.
 * Invalid in 64-bit mode.  The conditional check on EFLAGS.OF is done by
 * iemCImpl_int itself; here we just pass the vector along.
 */
FNIEMOP_DEF(iemOp_into)
{
    IEMOP_MNEMONIC("into");
    IEMOP_HLP_NO_64BIT();

    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG_CONST(uint8_t,   u8Int,      /*=*/ X86_XCPT_OF, 0);
    IEM_MC_ARG_CONST(bool,      fIsBpInstr, /*=*/ false, 1);
    IEM_MC_CALL_CIMPL_2(iemCImpl_int, u8Int, fIsBpInstr);
    IEM_MC_END();
    return VINF_SUCCESS;
}
12377
12378
12379/** Opcode 0xcf. */
/**
 * Opcode 0xcf - IRET: interrupt return, deferred entirely to the C
 * implementation (iemCImpl_iret) with the effective operand size.
 */
FNIEMOP_DEF(iemOp_iret)
{
    IEMOP_MNEMONIC("iret");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_iret, pIemCpu->enmEffOpSize);
}
12386
12387
12388/** Opcode 0xd0. */
/**
 * Opcode 0xd0 - Group 2 shift/rotate on a byte operand with an implicit
 * count of 1: ROL/ROR/RCL/RCR/SHL/SHR/SAR Eb,1.
 *
 * The operation is selected by the reg field of ModR/M; /6 raises \#UD.
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Eb,1"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Eb,1"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Eb,1"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Eb,1"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Eb,1"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Eb,1"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Eb,1"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
    /* OF and AF are left undefined by these instructions for some counts. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
        IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=*/1,   1);
        IEM_MC_ARG(uint32_t *,      pEFlags,            2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
        IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=*/1,   1);
        IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,    2);
        IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
12443
12444
12445
12446/** Opcode 0xd1. */
/**
 * Opcode 0xd1 - Group 2 shift/rotate on a word/dword/qword operand with an
 * implicit count of 1: ROL/ROR/RCL/RCR/SHL/SHR/SAR Ev,1.
 *
 * The operation is selected by the reg field of ModR/M; /6 raises \#UD.
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Ev,1"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Ev,1"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Ev,1"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Ev,1"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Ev,1"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Ev,1"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Ev,1"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
    /* OF and AF are left undefined by these instructions for some counts. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_NO_LOCK_PREFIX();
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit writes zero the upper half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,      pu16Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,    2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,      pu32Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,    2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,      pu64Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,    2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12575
12576
12577/** Opcode 0xd2. */
/**
 * Opcode 0xd2 - Group 2 shift/rotate on a byte operand with the count taken
 * from CL: ROL/ROR/RCL/RCR/SHL/SHR/SAR Eb,CL.
 *
 * The operation is selected by the reg field of ModR/M; /6 raises \#UD.
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Eb,CL"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Eb,CL"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Eb,CL"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Eb,CL"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Eb,CL"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Eb,CL"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Eb,CL"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc, grr. */
    }
    /* OF and AF are left undefined by these instructions for some counts. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,   pu8Dst,     0);
        IEM_MC_ARG(uint8_t,     cShiftArg,  1);
        IEM_MC_ARG(uint32_t *,  pEFlags,    2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        /* The shift count comes from CL (low byte of rCX). */
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,   pu8Dst,          0);
        IEM_MC_ARG(uint8_t,     cShiftArg,       1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
12634
12635
12636/** Opcode 0xd3. */
/**
 * Opcode 0xd3 - Group 2 shift/rotate on a word/dword/qword operand with the
 * count taken from CL: ROL/ROR/RCL/RCR/SHL/SHR/SAR Ev,CL.
 *
 * The operation is selected by the reg field of ModR/M; /6 raises \#UD.
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Ev,CL"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Ev,CL"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Ev,CL"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Ev,CL"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Ev,CL"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Ev,CL"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Ev,CL"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    /* OF and AF are left undefined by these instructions for some counts. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_NO_LOCK_PREFIX();
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,    0);
                IEM_MC_ARG(uint8_t,         cShiftArg,  1);
                IEM_MC_ARG(uint32_t *,      pEFlags,    2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                /* Count comes from CL (low byte of rCX). */
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,    0);
                IEM_MC_ARG(uint8_t,         cShiftArg,  1);
                IEM_MC_ARG(uint32_t *,      pEFlags,    2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit writes zero the upper half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,    0);
                IEM_MC_ARG(uint8_t,         cShiftArg,  1);
                IEM_MC_ARG(uint32_t *,      pEFlags,    2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,      pu16Dst,         0);
                IEM_MC_ARG(uint8_t,         cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,      pu32Dst,         0);
                IEM_MC_ARG(uint8_t,         cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,      pu64Dst,         0);
                IEM_MC_ARG(uint8_t,         cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12771
12772/** Opcode 0xd4. */
/**
 * Opcode 0xd4 - AAM Ib: ASCII adjust AX after multiply, with an arbitrary
 * (usually 10) base byte.  Invalid in 64-bit mode; a zero base raises \#DE.
 */
FNIEMOP_DEF(iemOp_aam_Ib)
{
    IEMOP_MNEMONIC("aam Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    /* AAM divides by the immediate, so a zero base is a divide error. */
    if (!bImm)
        return IEMOP_RAISE_DIVIDE_ERROR();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aam, bImm);
}
12783
12784
12785/** Opcode 0xd5. */
/**
 * Opcode 0xd5 - AAD Ib: ASCII adjust AX before division, with an arbitrary
 * (usually 10) base byte.  Invalid in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_aad_Ib)
{
    IEMOP_MNEMONIC("aad Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aad, bImm);
}
12794
12795
12796/** Opcode 0xd6. */
/**
 * Opcode 0xd6 - SALC: set AL from the carry flag (AL = CF ? 0xff : 0x00).
 * An undocumented instruction; invalid in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_salc)
{
    IEMOP_MNEMONIC("salc");
    IEMOP_HLP_MIN_286(); /* (undocumented at the time) */
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0xff);
    } IEM_MC_ELSE() {
        IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0x00);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
12815
12816
12817/** Opcode 0xd7. */
/**
 * Opcode 0xd7 - XLAT: AL = [rBX + AL], using the effective segment (default
 * DS, overridable).  The address width follows the effective address mode.
 */
FNIEMOP_DEF(iemOp_xlat)
{
    IEMOP_MNEMONIC("xlat");
    IEMOP_HLP_NO_LOCK_PREFIX();
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint16_t, u16Addr);
            /* Zero-extend AL and add BX to form the table index. */
            IEM_MC_FETCH_GREG_U8_ZX_U16(u16Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U16_TO_LOCAL(u16Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM16_U8(u8Tmp, pIemCpu->iEffSeg, u16Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint32_t, u32Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U32(u32Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U32_TO_LOCAL(u32Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM32_U8(u8Tmp, pIemCpu->iEffSeg, u32Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint64_t, u64Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U64(u64Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U64_TO_LOCAL(u64Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM_U8(u8Tmp, pIemCpu->iEffSeg, u64Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
12863
12864
12865/**
12866 * Common worker for FPU instructions working on ST0 and STn, and storing the
12867 * result in ST0.
12868 *
12869 * @param pfnAImpl Pointer to the instruction implementation (assembly).
12870 */
/**
 * Common worker for FPU instructions working on ST0 and STn, and storing the
 * result in ST0.
 *
 * Raises \#NM/\#MF as appropriate; on an empty source register the FPU stack
 * underflow path is taken instead of calling the assembly worker.
 *
 * @param   bRm         The ModR/M byte; the rm field selects STn.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,         1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value2,         2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Only call the worker if both ST0 and STn hold values. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
12895
12896
12897/**
12898 * Common worker for FPU instructions working on ST0 and STn, and only affecting
12899 * flags.
12900 *
12901 * @param pfnAImpl Pointer to the instruction implementation (assembly).
12902 */
/**
 * Common worker for FPU instructions working on ST0 and STn, and only
 * affecting flags (no result register is written, only FSW is updated).
 *
 * @param   bRm         The ModR/M byte; the rm field selects STn.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,         1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value2,         2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Only call the worker if both ST0 and STn hold values. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW(u16Fsw);
    IEM_MC_ELSE()
        /* UINT8_MAX = no destination register to mark in the underflow. */
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
12927
12928
/**
 * Common worker for FPU instructions working on ST0 and STn, only affecting
 * flags, and popping when done.
 *
 * Same as iemOpHlpFpuNoStore_st0_stN except the stack is popped after the
 * FSW update (also on the underflow path).
 *
 * @param   bRm         The ModR/M byte; low three bits select STn.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(UINT8_MAX);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
12959
12960
/** Opcode 0xd8 11/0. FADD ST(0),ST(i) - 80-bit add via the common worker. */
FNIEMOP_DEF_1(iemOp_fadd_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fadd st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fadd_r80_by_r80);
}
12967
12968
/** Opcode 0xd8 11/1. FMUL ST(0),ST(i) - 80-bit multiply via the common worker. */
FNIEMOP_DEF_1(iemOp_fmul_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fmul st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fmul_r80_by_r80);
}
12975
12976
/** Opcode 0xd8 11/2. FCOM ST(i) - compare, flags only, no pop. */
FNIEMOP_DEF_1(iemOp_fcom_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fcom_r80_by_r80);
}
12983
12984
/** Opcode 0xd8 11/3. FCOMP ST(i) - same compare impl as FCOM, but pops. */
FNIEMOP_DEF_1(iemOp_fcomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fcom_r80_by_r80);
}
12991
12992
/** Opcode 0xd8 11/4. FSUB ST(0),ST(i). */
FNIEMOP_DEF_1(iemOp_fsub_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsub st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsub_r80_by_r80);
}
12999
13000
/** Opcode 0xd8 11/5. FSUBR ST(0),ST(i) - reversed-operand subtract. */
FNIEMOP_DEF_1(iemOp_fsubr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsubr_r80_by_r80);
}
13007
13008
/** Opcode 0xd8 11/6. FDIV ST(0),ST(i). */
FNIEMOP_DEF_1(iemOp_fdiv_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdiv st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdiv_r80_by_r80);
}
13015
13016
/** Opcode 0xd8 11/7. FDIVR ST(0),ST(i) - reversed-operand divide. */
FNIEMOP_DEF_1(iemOp_fdivr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdivr_r80_by_r80);
}
13023
13024
/**
 * Common worker for FPU instructions working on ST0 and an m32r, and storing
 * the result in ST0.
 *
 * Decodes the effective address, fetches the 32-bit real from memory, then
 * calls the assembly worker with ST0 as the first operand.
 *
 * @param   bRm         The ModR/M byte (memory form).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32r, uint8_t, bRm, PFNIEMAIMPLFPUR32, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    /* Address calculation must precede the done-decoding check. */
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr32Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13060
13061
/** Opcode 0xd8 !11/0. FADD ST(0),m32real. */
FNIEMOP_DEF_1(iemOp_fadd_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fadd st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fadd_r80_by_r32);
}
13068
13069
/** Opcode 0xd8 !11/1. FMUL ST(0),m32real. */
FNIEMOP_DEF_1(iemOp_fmul_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fmul st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fmul_r80_by_r32);
}
13076
13077
/** Opcode 0xd8 !11/2. FCOM ST(0),m32real - compare, flags only, no pop. */
FNIEMOP_DEF_1(iemOp_fcom_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcom st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        /* Memory-operand variant so FPUDP/FPUDS get recorded alongside FSW. */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13110
13111
/** Opcode 0xd8 !11/3. FCOMP ST(0),m32real - like FCOM m32r but pops ST0. */
FNIEMOP_DEF_1(iemOp_fcomp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcomp st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13144
13145
/** Opcode 0xd8 !11/4. FSUB ST(0),m32real. */
FNIEMOP_DEF_1(iemOp_fsub_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsub st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsub_r80_by_r32);
}
13152
13153
/** Opcode 0xd8 !11/5. FSUBR ST(0),m32real - reversed-operand subtract. */
FNIEMOP_DEF_1(iemOp_fsubr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsubr_r80_by_r32);
}
13160
13161
/** Opcode 0xd8 !11/6. FDIV ST(0),m32real. */
FNIEMOP_DEF_1(iemOp_fdiv_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdiv st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdiv_r80_by_r32);
}
13168
13169
/** Opcode 0xd8 !11/7. FDIVR ST(0),m32real - reversed-operand divide. */
FNIEMOP_DEF_1(iemOp_fdivr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdivr_r80_by_r32);
}
13176
13177
/** Opcode 0xd8. First FPU escape byte: dispatches on mod and reg of ModR/M.
 *  Register forms (mod=3) operate on ST(i); memory forms use an m32real. */
FNIEMOP_DEF(iemOp_EscF0)
{
    /* Record the FPU opcode offset (opcode byte precedes ModR/M). */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_stN,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_stN,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_stN, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m32r,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m32r,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m32r,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m32r,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m32r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m32r,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m32r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
13215
13216
/** Opcode 0xd9 /0 mem32real
 * Loads an m32real, converts to 80-bit, and pushes it onto the FPU stack.
 * @sa iemOp_fld_m64r */
FNIEMOP_DEF_1(iemOp_fld_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fld m32r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val, r32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* Register 7 (relative to TOP) is the slot the push will land in. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r32_to_r80, pFpuRes, pr32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13249
13250
/** Opcode 0xd9 !11/2 mem32real
 * Stores ST0 as an m32real without popping.  If ST0 is empty and the invalid
 * operation exception is masked (FCW.IM), a negative QNaN is stored instead. */
FNIEMOP_DEF_1(iemOp_fst_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fst m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination up front so write faults are taken before the
       FPU state is touched. */
    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13285
13286
/** Opcode 0xd9 !11/3
 * FSTP m32real: same as FST m32real but pops the stack afterwards. */
FNIEMOP_DEF_1(iemOp_fstp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fstp m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Masked invalid operation: store negative QNaN, then pop anyway. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13321
13322
/** Opcode 0xd9 !11/4
 * FLDENV m14/m28byte: loads the FPU environment; size depends on the
 * effective operand size, so the work is done in a C implementation. */
FNIEMOP_DEF_1(iemOp_fldenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fldenv m14/28byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pIemCpu->enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fldenv, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
13339
13340
13341/** Opcode 0xd9 !11/5 */
13342FNIEMOP_DEF_1(iemOp_fldcw, uint8_t, bRm)
13343{
13344 IEMOP_MNEMONIC("fldcw m2byte");
13345 IEM_MC_BEGIN(1, 1);
13346 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13347 IEM_MC_ARG(uint16_t, u16Fsw, 0);
13348 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13349 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13350 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13351 IEM_MC_FETCH_MEM_U16(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
13352 IEM_MC_CALL_CIMPL_1(iemCImpl_fldcw, u16Fsw);
13353 IEM_MC_END();
13354 return VINF_SUCCESS;
13355}
13356
13357
/** Opcode 0xd9 !11/6
 * FNSTENV m14/m28byte: stores the FPU environment (no pending-exception
 * check, hence the N form); size depends on effective operand size. */
FNIEMOP_DEF_1(iemOp_fnstenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fstenv m14/m28byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pIemCpu->enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fnstenv, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}
13374
13375
/** Opcode 0xd9 !11/7
 * FNSTCW m2byte: stores the FPU control word to memory. */
FNIEMOP_DEF_1(iemOp_fnstcw, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fnstcw m2byte");
    IEM_MC_BEGIN(2, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fcw);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_FETCH_FCW(u16Fcw);
    IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Fcw);
    IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
    return VINF_SUCCESS;
}
13392
13393
/** Opcode 0xd9 0xd0, 0xd9 0xd8-0xdf, ++?.
 * FNOP: does no arithmetic, but still raises \#NM/\#MF and updates the FPU
 * opcode/instruction pointer like other FPU instructions. */
FNIEMOP_DEF(iemOp_fnop)
{
    IEMOP_MNEMONIC("fnop");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /** @todo Testcase: looks like FNOP leaves FOP alone but updates FPUIP. Could be
     *        intel optimizations. Investigate. */
    IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
    return VINF_SUCCESS;
}
13411
13412
/** Opcode 0xd9 11/0 stN
 * FLD ST(i): pushes a copy of ST(i) onto the stack. */
FNIEMOP_DEF_1(iemOp_fld_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fld stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, bRm & X86_MODRM_RM_MASK)
        /* Wrap the source value in a result with a clean FSW and push it. */
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_PUSH_FPU_RESULT(FpuRes);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW();
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
13438
13439
/** Opcode 0xd9 11/3 stN
 * FXCH ST(i): exchanges ST0 and ST(i); sets C1 on success.  The empty-register
 * case is handled by a C implementation (iemCImpl_fxch_underflow). */
FNIEMOP_DEF_1(iemOp_fxch_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fxch stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(1, 3);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value2);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_CONST(uint8_t, iStReg, /*=*/ bRm & X86_MODRM_RM_MASK, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        /* Old ST(i) goes to ST0 (via the result), old ST0 goes to ST(i). */
        IEM_MC_SET_FPU_RESULT(FpuRes, X86_FSW_C1, pr80Value2);
        IEM_MC_STORE_FPUREG_R80_SRC_REF(bRm & X86_MODRM_RM_MASK, pr80Value1);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_CALL_CIMPL_1(iemCImpl_fxch_underflow, iStReg);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
13468
13469
/** Opcode 0xd9 11/4, 0xdd 11/2.
 * FSTP ST(i): copies ST0 to ST(i) and pops.  The i==0 case degenerates to a
 * plain pop and gets a dedicated, cheaper path. */
FNIEMOP_DEF_1(iemOp_fstp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fstp st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* fstp st0, st0 is frequently used as an official 'ffreep st0' sequence. */
    uint8_t const iDstReg = bRm & X86_MODRM_RM_MASK;
    if (!iDstReg)
    {
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL_CONST(uint16_t, u16Fsw, /*=*/ 0);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();
        IEM_MC_IF_FPUREG_NOT_EMPTY(0)
            IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw); /* just pop, no store needed */
        IEM_MC_ELSE()
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(0);
        IEM_MC_ENDIF();
        IEM_MC_USED_FPU();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
        IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();
        IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
            IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
            IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, iDstReg);
        IEM_MC_ELSE()
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(iDstReg);
        IEM_MC_ENDIF();
        IEM_MC_USED_FPU();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
13512
13513
/**
 * Common worker for FPU instructions working on ST0 and replaces it with the
 * result, i.e. unary operators.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpu_st0, PFNIEMAIMPLFPUR80UNARY, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(2, 1);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuRes, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0); /* result replaces ST0 */
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13543
13544
/** Opcode 0xd9 0xe0. FCHS - negate ST0 in place. */
FNIEMOP_DEF(iemOp_fchs)
{
    IEMOP_MNEMONIC("fchs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fchs_r80);
}
13551
13552
/** Opcode 0xd9 0xe1. FABS - absolute value of ST0 in place. */
FNIEMOP_DEF(iemOp_fabs)
{
    IEMOP_MNEMONIC("fabs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fabs_r80);
}
13559
13560
/**
 * Common worker for FPU instructions working on ST0 and only returns FSW.
 *
 * No register is modified; only the status word is merged back.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0, PFNIEMAIMPLFPUR80UNARYFSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(2, 1);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pu16Fsw, pr80Value);
        IEM_MC_UPDATE_FSW(u16Fsw);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX); /* no destination register */
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13589
13590
/** Opcode 0xd9 0xe4. FTST - compare ST0 against +0.0, flags only. */
FNIEMOP_DEF(iemOp_ftst)
{
    IEMOP_MNEMONIC("ftst st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_ftst_r80);
}
13597
13598
/** Opcode 0xd9 0xe5. FXAM - classify ST0 into C0/C2/C3, flags only. */
FNIEMOP_DEF(iemOp_fxam)
{
    IEMOP_MNEMONIC("fxam st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_fxam_r80);
}
13605
13606
/**
 * Common worker for FPU instructions pushing a constant onto the FPU stack.
 *
 * Signals stack overflow if the destination slot is occupied.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuPushConstant, PFNIEMAIMPLFPUR80LDCONST, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(1, 1);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Register 7 (relative to TOP) is the slot the push will land in. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_1(pfnAImpl, pFpuRes);
        IEM_MC_PUSH_FPU_RESULT(FpuRes);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW();
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13634
13635
/** Opcode 0xd9 0xe8. FLD1 - push +1.0. */
FNIEMOP_DEF(iemOp_fld1)
{
    IEMOP_MNEMONIC("fld1");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fld1);
}
13642
13643
/** Opcode 0xd9 0xe9. FLDL2T - push log2(10). */
FNIEMOP_DEF(iemOp_fldl2t)
{
    IEMOP_MNEMONIC("fldl2t");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2t);
}
13650
13651
/** Opcode 0xd9 0xea. FLDL2E - push log2(e). */
FNIEMOP_DEF(iemOp_fldl2e)
{
    IEMOP_MNEMONIC("fldl2e");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2e);
}
13658
/** Opcode 0xd9 0xeb. FLDPI - push pi. */
FNIEMOP_DEF(iemOp_fldpi)
{
    IEMOP_MNEMONIC("fldpi");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldpi);
}
13665
13666
/** Opcode 0xd9 0xec. FLDLG2 - push log10(2). */
FNIEMOP_DEF(iemOp_fldlg2)
{
    IEMOP_MNEMONIC("fldlg2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldlg2);
}
13673
/** Opcode 0xd9 0xed. FLDLN2 - push ln(2). */
FNIEMOP_DEF(iemOp_fldln2)
{
    IEMOP_MNEMONIC("fldln2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldln2);
}
13680
13681
/** Opcode 0xd9 0xee. FLDZ - push +0.0. */
FNIEMOP_DEF(iemOp_fldz)
{
    IEMOP_MNEMONIC("fldz");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldz);
}
13688
13689
/** Opcode 0xd9 0xf0. F2XM1 - ST0 = 2^ST0 - 1, in place. */
FNIEMOP_DEF(iemOp_f2xm1)
{
    IEMOP_MNEMONIC("f2xm1 st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_f2xm1_r80);
}
13696
13697
13698/** Opcode 0xd9 0xf1. */
13699FNIEMOP_DEF(iemOp_fylx2)
13700{
13701 IEMOP_MNEMONIC("fylx2 st0");
13702 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fyl2x_r80);
13703}
13704
13705
/**
 * Common worker for FPU instructions working on ST0 and having two outputs, one
 * replacing ST0 and one pushed onto the stack.
 *
 * Used by FPTAN, FXTRACT and FSINCOS.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuReplace_st0_push, PFNIEMAIMPLFPUR80UNARYTWO, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(2, 1);
    IEM_MC_LOCAL(IEMFPURESULTTWO, FpuResTwo);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULTTWO, pFpuResTwo, FpuResTwo, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuResTwo, pr80Value);
        IEM_MC_PUSH_FPU_RESULT_TWO(FpuResTwo); /* replaces ST0 and pushes */
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW_TWO();
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13735
13736
/** Opcode 0xd9 0xf2. FPTAN - ST0 = tan(ST0), then pushes 1.0. */
FNIEMOP_DEF(iemOp_fptan)
{
    IEMOP_MNEMONIC("fptan st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fptan_r80_r80);
}
13743
13744
/**
 * Common worker for FPU instructions working on STn and ST0, storing the result
 * in STn, and popping the stack unless IE, DE or ZE was raised.
 *
 * Note the operand order: ST(i) is the first (destination) operand, ST0 the
 * second.
 *
 * @param   bRm         The ModR/M byte (or a literal STn index in 0..7 when
 *                      called with a fixed register, e.g. FPATAN uses 1).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, bRm & X86_MODRM_RM_MASK);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(bRm & X86_MODRM_RM_MASK);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13776
13777
/** Opcode 0xd9 0xf3. FPATAN - ST1 = atan(ST1/ST0), then pop. */
FNIEMOP_DEF(iemOp_fpatan)
{
    IEMOP_MNEMONIC("fpatan st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fpatan_r80_by_r80);
}
13784
13785
/** Opcode 0xd9 0xf4. FXTRACT - splits ST0 into exponent (ST1) and significand (ST0). */
FNIEMOP_DEF(iemOp_fxtract)
{
    IEMOP_MNEMONIC("fxtract st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fxtract_r80_r80);
}
13792
13793
/** Opcode 0xd9 0xf5. FPREM1 - IEEE partial remainder, ST0 = ST0 REM ST1. */
FNIEMOP_DEF(iemOp_fprem1)
{
    IEMOP_MNEMONIC("fprem1 st0, st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem1_r80_by_r80);
}
13800
13801
/** Opcode 0xd9 0xf6.
 * FDECSTP - rotate the stack by decrementing TOP; no register tags change. */
FNIEMOP_DEF(iemOp_fdecstp)
{
    IEMOP_MNEMONIC("fdecstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FPU_STACK_DEC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0); /* clears C0-C3 */

    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
13824
13825
/** Opcode 0xd9 0xf7.
 * FINCSTP - rotate the stack by incrementing TOP; no register tags change. */
FNIEMOP_DEF(iemOp_fincstp)
{
    IEMOP_MNEMONIC("fincstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0); /* clears C0-C3 */

    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
13848
13849
/** Opcode 0xd9 0xf8. FPREM - truncating partial remainder, ST0 = ST0 REM ST1. */
FNIEMOP_DEF(iemOp_fprem)
{
    IEMOP_MNEMONIC("fprem st0, st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem_r80_by_r80);
}
13856
13857
/** Opcode 0xd9 0xf9. FYL2XP1 - ST1 = ST1 * log2(ST0 + 1), then pop. */
FNIEMOP_DEF(iemOp_fyl2xp1)
{
    IEMOP_MNEMONIC("fyl2xp1 st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2xp1_r80_by_r80);
}
13864
13865
/** Opcode 0xd9 0xfa. FSQRT - ST0 = sqrt(ST0), in place. */
FNIEMOP_DEF(iemOp_fsqrt)
{
    IEMOP_MNEMONIC("fsqrt st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsqrt_r80);
}
13872
13873
/** Opcode 0xd9 0xfb. FSINCOS - ST0 = sin(ST0), then pushes cos of the old ST0. */
FNIEMOP_DEF(iemOp_fsincos)
{
    IEMOP_MNEMONIC("fsincos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fsincos_r80_r80);
}
13880
13881
/** Opcode 0xd9 0xfc. FRNDINT - round ST0 to integer per FCW.RC, in place. */
FNIEMOP_DEF(iemOp_frndint)
{
    IEMOP_MNEMONIC("frndint st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_frndint_r80);
}
13888
13889
/** Opcode 0xd9 0xfd. FSCALE - ST0 = ST0 * 2^trunc(ST1). */
FNIEMOP_DEF(iemOp_fscale)
{
    IEMOP_MNEMONIC("fscale st0, st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fscale_r80_by_r80);
}
13896
13897
/** Opcode 0xd9 0xfe. FSIN - ST0 = sin(ST0), in place. */
FNIEMOP_DEF(iemOp_fsin)
{
    IEMOP_MNEMONIC("fsin st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsin_r80);
}
13904
13905
/** Opcode 0xd9 0xff. FCOS - ST0 = cos(ST0), in place. */
FNIEMOP_DEF(iemOp_fcos)
{
    IEMOP_MNEMONIC("fcos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fcos_r80);
}
13912
13913
/** Used by iemOp_EscF1.
 * Dispatch table for 0xd9 register forms with the second byte in 0xe0-0xff;
 * index is (second byte - 0xe0). */
static const PFNIEMOP g_apfnEscF1_E0toFF[32] =
{
    /* 0xe0 */  iemOp_fchs,
    /* 0xe1 */  iemOp_fabs,
    /* 0xe2 */  iemOp_Invalid,
    /* 0xe3 */  iemOp_Invalid,
    /* 0xe4 */  iemOp_ftst,
    /* 0xe5 */  iemOp_fxam,
    /* 0xe6 */  iemOp_Invalid,
    /* 0xe7 */  iemOp_Invalid,
    /* 0xe8 */  iemOp_fld1,
    /* 0xe9 */  iemOp_fldl2t,
    /* 0xea */  iemOp_fldl2e,
    /* 0xeb */  iemOp_fldpi,
    /* 0xec */  iemOp_fldlg2,
    /* 0xed */  iemOp_fldln2,
    /* 0xee */  iemOp_fldz,
    /* 0xef */  iemOp_Invalid,
    /* 0xf0 */  iemOp_f2xm1,
    /* 0xf1 */  iemOp_fylx2, /* FYL2X (function name carries a historic typo) */
    /* 0xf2 */  iemOp_fptan,
    /* 0xf3 */  iemOp_fpatan,
    /* 0xf4 */  iemOp_fxtract,
    /* 0xf5 */  iemOp_fprem1,
    /* 0xf6 */  iemOp_fdecstp,
    /* 0xf7 */  iemOp_fincstp,
    /* 0xf8 */  iemOp_fprem,
    /* 0xf9 */  iemOp_fyl2xp1,
    /* 0xfa */  iemOp_fsqrt,
    /* 0xfb */  iemOp_fsincos,
    /* 0xfc */  iemOp_frndint,
    /* 0xfd */  iemOp_fscale,
    /* 0xfe */  iemOp_fsin,
    /* 0xff */  iemOp_fcos
};
13950
13951
/** Opcode 0xd9 - FPU escape group 1.  Records the FPU opcode offset and
 *  dispatches on the ModR/M byte: register forms via the reg field (with the
 *  0xe0..0xff range going through g_apfnEscF1_E0toFF), memory forms to the
 *  m32r / control-word / environment handlers. */
FNIEMOP_DEF(iemOp_EscF1)
{
    /* Remember where the FPU opcode starts so FOP can be updated later. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register operand form (mod == 3). */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm);
            case 2:
                if (bRm == 0xd0)
                    return FNIEMOP_CALL(iemOp_fnop);
                return IEMOP_RAISE_INVALID_OPCODE();
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved. Intel behavior seems to be FSTP ST(i) though. */
            case 4:
            case 5:
            case 6:
            case 7:
                /* reg 4..7 <=> bRm 0xe0..0xff, table-dispatched. */
                Assert((unsigned)bRm - 0xe0U < RT_ELEMENTS(g_apfnEscF1_E0toFF));
                return FNIEMOP_CALL(g_apfnEscF1_E0toFF[bRm - 0xe0]);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory operand form. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m32r, bRm);
            case 1: return IEMOP_RAISE_INVALID_OPCODE();
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m32r, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fldenv, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fldcw, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fnstenv, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstcw, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
13993
13994
/** Opcode 0xda 11/0 - FCMOVB: copy ST(i) to ST(0) if CF is set.
 *  Raises stack underflow if either register is empty; the FPU opcode/IP is
 *  updated even when the condition is false. */
FNIEMOP_DEF_1(iemOp_fcmovb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14021
14022
/** Opcode 0xda 11/1 - FCMOVE: copy ST(i) to ST(0) if ZF is set.
 *  Raises stack underflow if either register is empty. */
FNIEMOP_DEF_1(iemOp_fcmove_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmove st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14049
14050
/** Opcode 0xda 11/2 - FCMOVBE: copy ST(i) to ST(0) if CF or ZF is set.
 *  Raises stack underflow if either register is empty. */
FNIEMOP_DEF_1(iemOp_fcmovbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14077
14078
/** Opcode 0xda 11/3 - FCMOVU: copy ST(i) to ST(0) if PF is set (unordered).
 *  Raises stack underflow if either register is empty. */
FNIEMOP_DEF_1(iemOp_fcmovu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovu st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14105
14106
14107/**
14108 * Common worker for FPU instructions working on ST0 and STn, only affecting
14109 * flags, and popping twice when done.
14110 *
14111 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14112 */
14113FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
14114{
14115 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14116
14117 IEM_MC_BEGIN(3, 1);
14118 IEM_MC_LOCAL(uint16_t, u16Fsw);
14119 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14120 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14121 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
14122
14123 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14124 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14125 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, 1)
14126 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
14127 IEM_MC_UPDATE_FSW_THEN_POP_POP(u16Fsw);
14128 IEM_MC_ELSE()
14129 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP_POP();
14130 IEM_MC_ENDIF();
14131 IEM_MC_USED_FPU();
14132 IEM_MC_ADVANCE_RIP();
14133
14134 IEM_MC_END();
14135 return VINF_SUCCESS;
14136}
14137
14138
/** Opcode 0xda 0xe9 - FUCOMPP: unordered compare ST(0) with ST(1), pop twice. */
FNIEMOP_DEF(iemOp_fucompp)
{
    IEMOP_MNEMONIC("fucompp st0,stN");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fucom_r80_by_r80);
}
14145
14146
14147/**
14148 * Common worker for FPU instructions working on ST0 and an m32i, and storing
14149 * the result in ST0.
14150 *
14151 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14152 */
14153FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32i, uint8_t, bRm, PFNIEMAIMPLFPUI32, pfnAImpl)
14154{
14155 IEM_MC_BEGIN(3, 3);
14156 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14157 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14158 IEM_MC_LOCAL(int32_t, i32Val2);
14159 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
14160 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14161 IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);
14162
14163 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14164 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14165
14166 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14167 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14168 IEM_MC_FETCH_MEM_I32(i32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);
14169
14170 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
14171 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi32Val2);
14172 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
14173 IEM_MC_ELSE()
14174 IEM_MC_FPU_STACK_UNDERFLOW(0);
14175 IEM_MC_ENDIF();
14176 IEM_MC_USED_FPU();
14177 IEM_MC_ADVANCE_RIP();
14178
14179 IEM_MC_END();
14180 return VINF_SUCCESS;
14181}
14182
14183
/** Opcode 0xda !11/0 - FIADD m32int: ST(0) += m32i. */
FNIEMOP_DEF_1(iemOp_fiadd_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fiadd m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fiadd_r80_by_i32);
}
14190
14191
/** Opcode 0xda !11/1 - FIMUL m32int: ST(0) *= m32i. */
FNIEMOP_DEF_1(iemOp_fimul_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fimul m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fimul_r80_by_i32);
}
14198
14199
/** Opcode 0xda !11/2 - FICOM m32int: compare ST(0) with m32i, set condition
 *  codes in FSW; no pop. */
FNIEMOP_DEF_1(iemOp_ficom_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ficom st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(int32_t,               i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14232
14233
/** Opcode 0xda !11/3 - FICOMP m32int: compare ST(0) with m32i, set condition
 *  codes in FSW, then pop (same assembly worker as FICOM, pop variant of the
 *  FSW update). */
FNIEMOP_DEF_1(iemOp_ficomp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ficomp st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(int32_t,               i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14266
14267
/** Opcode 0xda !11/4 - FISUB m32int: ST(0) -= m32i. */
FNIEMOP_DEF_1(iemOp_fisub_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisub m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisub_r80_by_i32);
}
14274
14275
/** Opcode 0xda !11/5 - FISUBR m32int: ST(0) = m32i - ST(0). */
FNIEMOP_DEF_1(iemOp_fisubr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisubr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisubr_r80_by_i32);
}
14282
14283
/** Opcode 0xda !11/6 - FIDIV m32int: ST(0) /= m32i. */
FNIEMOP_DEF_1(iemOp_fidiv_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fidiv m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidiv_r80_by_i32);
}
14290
14291
/** Opcode 0xda !11/7 - FIDIVR m32int: ST(0) = m32i / ST(0). */
FNIEMOP_DEF_1(iemOp_fidivr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fidivr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidivr_r80_by_i32);
}
14298
14299
/** Opcode 0xda - FPU escape group 2.  Register forms are the FCMOVcc
 *  instructions (plus FUCOMPP at 0xe9); memory forms are the m32int
 *  arithmetic/compare instructions. */
FNIEMOP_DEF(iemOp_EscF2)
{
    /* Remember where the FPU opcode starts so FOP can be updated later. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register operand form (mod == 3). */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovb_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmove_stN,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovu_stN,  bRm);
            case 4: return IEMOP_RAISE_INVALID_OPCODE();
            case 5:
                if (bRm == 0xe9)
                    return FNIEMOP_CALL(iemOp_fucompp);
                return IEMOP_RAISE_INVALID_OPCODE();
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory operand form: m32int arithmetic. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m32i,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m32i,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m32i,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m32i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m32i,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m32i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m32i,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m32i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
14339
14340
/** Opcode 0xdb !11/0 - FILD m32int: load signed 32-bit integer from memory,
 *  convert to R80 and push.  Pushing requires ST(7) to be free, otherwise a
 *  stack push overflow is raised. */
FNIEMOP_DEF_1(iemOp_fild_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fild m32i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(int32_t,               i32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val,  i32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i32_to_r80, pFpuRes, pi32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14372
14373
/** Opcode 0xdb !11/1 - FISTTP m32int: store ST(0) to memory as a 32-bit
 *  integer using truncation, then pop.  On stack underflow with IM masked,
 *  the integer indefinite value (INT32_MIN) is stored. */
FNIEMOP_DEF_1(iemOp_fisttp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisttp m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int32_t *,               pi32Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14408
14409
/** Opcode 0xdb !11/2 - FIST m32int: store ST(0) to memory as a 32-bit
 *  integer (rounded per FCW); no pop.  On stack underflow with IM masked,
 *  the integer indefinite value (INT32_MIN) is stored. */
FNIEMOP_DEF_1(iemOp_fist_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fist m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int32_t *,               pi32Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14444
14445
14446/** Opcode 0xdb !11/3. */
14447FNIEMOP_DEF_1(iemOp_fistp_m32i, uint8_t, bRm)
14448{
14449 IEMOP_MNEMONIC("fisttp m32i");
14450 IEM_MC_BEGIN(3, 2);
14451 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14452 IEM_MC_LOCAL(uint16_t, u16Fsw);
14453 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14454 IEM_MC_ARG(int32_t *, pi32Dst, 1);
14455 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
14456
14457 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
14458 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14459 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14460 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14461
14462 IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
14463 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
14464 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
14465 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
14466 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
14467 IEM_MC_ELSE()
14468 IEM_MC_IF_FCW_IM()
14469 IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
14470 IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
14471 IEM_MC_ENDIF();
14472 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
14473 IEM_MC_ENDIF();
14474 IEM_MC_USED_FPU();
14475 IEM_MC_ADVANCE_RIP();
14476
14477 IEM_MC_END();
14478 return VINF_SUCCESS;
14479}
14480
14481
/** Opcode 0xdb !11/5 - FLD m80real: load an 80-bit real from memory and push
 *  it.  Pushing requires ST(7) to be free, otherwise a stack push overflow
 *  is raised. */
FNIEMOP_DEF_1(iemOp_fld_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fld m80r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(RTFLOAT80U,            r80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT80U,  pr80Val,    r80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R80(r80Val, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r80, pFpuRes, pr80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14513
14514
/** Opcode 0xdb !11/7 - FSTP m80real: store ST(0) to memory as 80-bit real,
 *  then pop.  On stack underflow with IM masked, a negative QNaN is stored. */
FNIEMOP_DEF_1(iemOp_fstp_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fstp m80r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT80U,             pr80Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr80Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r80, pu16Fsw, pr80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr80Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(pr80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr80Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14549
14550
/** Opcode 0xdb 11/0 - FCMOVNB: copy ST(i) to ST(0) if CF is clear.
 *  Raises stack underflow if either register is empty. */
FNIEMOP_DEF_1(iemOp_fcmovnb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovnb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14577
14578
/** Opcode 0xdb 11/1 - FCMOVNE: copy ST(i) to ST(0) if ZF is clear.
 *  Raises stack underflow if either register is empty. */
FNIEMOP_DEF_1(iemOp_fcmovne_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovne st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14605
14606
/** Opcode 0xdb 11/2 - FCMOVNBE: copy ST(i) to ST(0) if both CF and ZF are
 *  clear.  Raises stack underflow if either register is empty. */
FNIEMOP_DEF_1(iemOp_fcmovnbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovnbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14633
14634
14635/** Opcode 0xdb 11/3. */
14636FNIEMOP_DEF_1(iemOp_fcmovnnu_stN, uint8_t, bRm)
14637{
14638 IEMOP_MNEMONIC("fcmovnnu st0,stN");
14639 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14640
14641 IEM_MC_BEGIN(0, 1);
14642 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
14643
14644 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14645 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14646
14647 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
14648 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF)
14649 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
14650 IEM_MC_ENDIF();
14651 IEM_MC_UPDATE_FPU_OPCODE_IP();
14652 IEM_MC_ELSE()
14653 IEM_MC_FPU_STACK_UNDERFLOW(0);
14654 IEM_MC_ENDIF();
14655 IEM_MC_USED_FPU();
14656 IEM_MC_ADVANCE_RIP();
14657
14658 IEM_MC_END();
14659 return VINF_SUCCESS;
14660}
14661
14662
/** Opcode 0xdb 0xe0 - FNENI: 8087 enable-interrupts; a no-op on later FPUs
 *  (only the device-not-available check is performed). */
FNIEMOP_DEF(iemOp_fneni)
{
    IEMOP_MNEMONIC("fneni (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
14674
14675
/** Opcode 0xdb 0xe1 - FNDISI: 8087 disable-interrupts; a no-op on later FPUs
 *  (only the device-not-available check is performed). */
FNIEMOP_DEF(iemOp_fndisi)
{
    IEMOP_MNEMONIC("fndisi (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
14687
14688
/** Opcode 0xdb 0xe2 - FNCLEX: clear the FPU exception bits in FSW without
 *  checking for pending exceptions. */
FNIEMOP_DEF(iemOp_fnclex)
{
    IEMOP_MNEMONIC("fnclex");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_CLEAR_FSW_EX();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
14702
14703
/** Opcode 0xdb 0xe3 - FNINIT: reinitialize the FPU without checking for
 *  pending exceptions (deferred to the C implementation). */
FNIEMOP_DEF(iemOp_fninit)
{
    IEMOP_MNEMONIC("fninit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_finit, false /*fCheckXcpts*/);
}
14711
14712
/** Opcode 0xdb 0xe4 - FNSETPM: 80287 set-protected-mode; a no-op on later
 *  FPUs (only the device-not-available check is performed). */
FNIEMOP_DEF(iemOp_fnsetpm)
{
    IEMOP_MNEMONIC("fnsetpm (80287/ign)");   /* set protected mode on fpu. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
14724
14725
/** Opcode 0xdb 0xe5 - FRSTPM: 80287XL reset-protected-mode.  Newer CPUs
 *  raise \#UD, which is the behavior emulated here (the no-op variant is
 *  compiled out). */
FNIEMOP_DEF(iemOp_frstpm)
{
    IEMOP_MNEMONIC("frstpm (80287XL/ign)"); /* reset pm, back to real mode. */
#if 0 /* #UDs on newer CPUs */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
#else
    return IEMOP_RAISE_INVALID_OPCODE();
#endif
}
14741
14742
/** Opcode 0xdb 11/5 - FUCOMI: unordered compare ST(0) with ST(i), setting
 *  EFLAGS; no pop. */
FNIEMOP_DEF_1(iemOp_fucomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fucomi st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fucomi_r80_by_r80, false /*fPop*/);
}
14749
14750
/** Opcode 0xdb 11/6 - FCOMI: ordered compare ST(0) with ST(i), setting
 *  EFLAGS; no pop. */
FNIEMOP_DEF_1(iemOp_fcomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcomi st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, false /*fPop*/);
}
14757
14758
/** Opcode 0xdb - FPU escape group 3.  Register forms: FCMOVNcc, the control
 *  instructions in the reg==4 row (FNENI/FNDISI/FNCLEX/FNINIT/FNSETPM/
 *  FRSTPM), and FUCOMI/FCOMI.  Memory forms: m32int load/store and m80real
 *  load/store. */
FNIEMOP_DEF(iemOp_EscF3)
{
    /* Remember where the FPU opcode starts so FOP can be updated later. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register operand form (mod == 3). */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovnb_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmovne_stN,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovnbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovnnu_stN, bRm);
            case 4:
                /* Control instruction row, selected by the full ModR/M byte. */
                switch (bRm)
                {
                    case 0xe0:  return FNIEMOP_CALL(iemOp_fneni);
                    case 0xe1:  return FNIEMOP_CALL(iemOp_fndisi);
                    case 0xe2:  return FNIEMOP_CALL(iemOp_fnclex);
                    case 0xe3:  return FNIEMOP_CALL(iemOp_fninit);
                    case 0xe4:  return FNIEMOP_CALL(iemOp_fnsetpm);
                    case 0xe5:  return FNIEMOP_CALL(iemOp_frstpm);
                    case 0xe6:  return IEMOP_RAISE_INVALID_OPCODE();
                    case 0xe7:  return IEMOP_RAISE_INVALID_OPCODE();
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case 5: return FNIEMOP_CALL_1(iemOp_fucomi_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fcomi_stN, bRm);
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory operand form. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fild_m32i,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m32i,bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fist_m32i,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fistp_m32i, bRm);
            case 4: return IEMOP_RAISE_INVALID_OPCODE();
            case 5: return FNIEMOP_CALL_1(iemOp_fld_m80r,   bRm);
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return FNIEMOP_CALL_1(iemOp_fstp_m80r,  bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
14808
14809
14810/**
14811 * Common worker for FPU instructions working on STn and ST0, and storing the
14812 * result in STn unless IE, DE or ZE was raised.
14813 *
14814 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14815 */
14816FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
14817{
14818 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14819
14820 IEM_MC_BEGIN(3, 1);
14821 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14822 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
14823 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14824 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
14825
14826 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14827 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14828
14829 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
14830 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
14831 IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
14832 IEM_MC_ELSE()
14833 IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
14834 IEM_MC_ENDIF();
14835 IEM_MC_USED_FPU();
14836 IEM_MC_ADVANCE_RIP();
14837
14838 IEM_MC_END();
14839 return VINF_SUCCESS;
14840}
14841
14842
/** Opcode 0xdc 11/0 - FADD ST(i),ST(0). */
FNIEMOP_DEF_1(iemOp_fadd_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fadd stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fadd_r80_by_r80);
}
14849
14850
/** Opcode 0xdc 11/1 - FMUL ST(i),ST(0). */
FNIEMOP_DEF_1(iemOp_fmul_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fmul stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fmul_r80_by_r80);
}
14857
14858
/** Opcode 0xdc 11/4 - FSUBR ST(i),ST(0). */
FNIEMOP_DEF_1(iemOp_fsubr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsubr_r80_by_r80);
}
14865
14866
/** Opcode 0xdc 11/5 - FSUB ST(i),ST(0). */
FNIEMOP_DEF_1(iemOp_fsub_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsub stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsub_r80_by_r80);
}
14873
14874
/** Opcode 0xdc 11/6 - FDIVR ST(i),ST(0). */
FNIEMOP_DEF_1(iemOp_fdivr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdivr_r80_by_r80);
}
14881
14882
/** Opcode 0xdc 11/7 - FDIV ST(i),ST(0). */
FNIEMOP_DEF_1(iemOp_fdiv_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdiv stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdiv_r80_by_r80);
}
14889
14890
14891/**
14892 * Common worker for FPU instructions working on ST0 and a 64-bit floating point
14893 * memory operand, and storing the result in ST0.
14894 *
14895 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14896 */
14897FNIEMOP_DEF_2(iemOpHlpFpu_ST0_m64r, uint8_t, bRm, PFNIEMAIMPLFPUR64, pfnImpl)
14898{
14899 IEM_MC_BEGIN(3, 3);
14900 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14901 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14902 IEM_MC_LOCAL(RTFLOAT64U, r64Factor2);
14903 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
14904 IEM_MC_ARG(PCRTFLOAT80U, pr80Factor1, 1);
14905 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Factor2, r64Factor2, 2);
14906
14907 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14908 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14909 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14910 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14911
14912 IEM_MC_FETCH_MEM_R64(r64Factor2, pIemCpu->iEffSeg, GCPtrEffSrc);
14913 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Factor1, 0)
14914 IEM_MC_CALL_FPU_AIMPL_3(pfnImpl, pFpuRes, pr80Factor1, pr64Factor2);
14915 IEM_MC_STORE_FPU_RESULT_MEM_OP(FpuRes, 0, pIemCpu->iEffSeg, GCPtrEffSrc);
14916 IEM_MC_ELSE()
14917 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(0, pIemCpu->iEffSeg, GCPtrEffSrc);
14918 IEM_MC_ENDIF();
14919 IEM_MC_USED_FPU();
14920 IEM_MC_ADVANCE_RIP();
14921
14922 IEM_MC_END();
14923 return VINF_SUCCESS;
14924}
14925
14926
/** Opcode 0xdc !11/0. */
FNIEMOP_DEF_1(iemOp_fadd_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fadd m64r");
    /* ST0 += [m64real]. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fadd_r80_by_r64);
}
14933
14934
/** Opcode 0xdc !11/1. */
FNIEMOP_DEF_1(iemOp_fmul_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fmul m64r");
    /* ST0 *= [m64real]. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fmul_r80_by_r64);
}
14941
14942
/** Opcode 0xdc !11/2.
 * FCOM ST0,m64real - compares ST0 with a 64-bit real memory operand,
 * updating only FSW (no value is stored to the stack). */
FNIEMOP_DEF_1(iemOp_fcom_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcom st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* Only run the comparison if ST0 is valid; otherwise record underflow. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14975
14976
/** Opcode 0xdc !11/3.
 * FCOMP ST0,m64real - same as FCOM m64real but pops the register stack
 * afterwards (note the *_THEN_POP variants below). */
FNIEMOP_DEF_1(iemOp_fcomp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcomp st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15009
15010
/** Opcode 0xdc !11/4. */
FNIEMOP_DEF_1(iemOp_fsub_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsub m64r");
    /* ST0 -= [m64real]. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsub_r80_by_r64);
}
15017
15018
/** Opcode 0xdc !11/5. */
FNIEMOP_DEF_1(iemOp_fsubr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubr m64r");
    /* ST0 = [m64real] - ST0 (reversed subtract). */
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsubr_r80_by_r64);
}
15025
15026
/** Opcode 0xdc !11/6. */
FNIEMOP_DEF_1(iemOp_fdiv_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdiv m64r");
    /* ST0 /= [m64real]. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdiv_r80_by_r64);
}
15033
15034
/** Opcode 0xdc !11/7. */
FNIEMOP_DEF_1(iemOp_fdivr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivr m64r");
    /* ST0 = [m64real] / ST0 (reversed divide). */
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdivr_r80_by_r64);
}
15041
15042
/** Opcode 0xdc.
 * Escape opcode dispatcher: register forms (mod == 3) operate on ST(i),
 * memory forms dispatch on the reg field to the m64real handlers. */
FNIEMOP_DEF(iemOp_EscF4)
{
    /* Remember the FPU opcode byte position for FOP reporting. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN_st0, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN_st0, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm); /* Marked reserved, intel behavior is that of FCOM ST(i). */
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm); /* Marked reserved, intel behavior is that of FCOMP ST(i). */
            case 4: return FNIEMOP_CALL_1(iemOp_fsubr_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsub_stN_st0, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivr_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdiv_stN_st0, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m64r, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m64r, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m64r, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m64r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m64r, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m64r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m64r, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m64r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
15079
15080
/** Opcode 0xdd !11/0.
 * FLD m64real - converts the memory operand to 80-bit and pushes it.
 * @sa iemOp_fld_m32r */
FNIEMOP_DEF_1(iemOp_fld_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fld m64r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val, r64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Val, pIemCpu->iEffSeg, GCPtrEffSrc);
    /* ST(7) must be empty for the push; otherwise it's a stack overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r64_to_r80, pFpuRes, pr64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15112
15113
/** Opcode 0xdd !11/1.
 * FISTTP m64int - store ST0 as a 64-bit integer with truncation and pop.
 * (Header previously said !11/0 by copy-paste; the 0xdd dispatcher routes
 * reg=1 here.) */
FNIEMOP_DEF_1(iemOp_fisttp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisttp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: with IM masked, store the integer indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15148
15149
/** Opcode 0xdd !11/2.
 * FST m64real - store ST0 as a 64-bit real without popping.
 * (Header previously said !11/0; the 0xdd dispatcher routes reg=2 here.) */
FNIEMOP_DEF_1(iemOp_fst_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fst m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: with IM masked, store negative QNaN (real indefinite). */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15184
15185
15186
15187
/** Opcode 0xdd !11/3.
 * FSTP m64real - same as FST m64real but pops the register stack.
 * (Header previously said !11/0; the 0xdd dispatcher routes reg=3 here.) */
FNIEMOP_DEF_1(iemOp_fstp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fstp m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: with IM masked, store negative QNaN (real indefinite). */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15222
15223
/** Opcode 0xdd !11/4.
 * FRSTOR m94/108byte - restore the full FPU state; deferred to the C
 * implementation. (Header previously said !11/0; dispatcher routes reg=4.) */
FNIEMOP_DEF_1(iemOp_frstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC("frstor m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pIemCpu->enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_frstor, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
15240
15241
/** Opcode 0xdd !11/6.
 * FNSAVE m94/108byte - save the full FPU state; deferred to the C
 * implementation. (Header previously said !11/0; dispatcher routes reg=6.) */
FNIEMOP_DEF_1(iemOp_fnsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fnsave m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pIemCpu->enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fnsave, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;

}
15259
/** Opcode 0xdd !11/7.
 * FNSTSW m16 - store the FPU status word to memory (no exception check).
 * (Header previously said !11/0; dispatcher routes reg=7.) */
FNIEMOP_DEF_1(iemOp_fnstsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fnstsw m16");

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();

    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Tmp);
    IEM_MC_ADVANCE_RIP();

/** @todo Debug / drop a hint to the verifier that things may differ
 * from REM. Seen 0x4020 (iem) vs 0x4000 (rem) at 0008:801c6b88 booting
 * NT4SP1. (X86_FSW_PE) */
    IEM_MC_END();
    return VINF_SUCCESS;
}
15283
15284
/** Opcode 0xdd 11/0.
 * FFREE ST(i) - marks the register empty without touching the stack top. */
FNIEMOP_DEF_1(iemOp_ffree_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ffree stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C1, C2 and C3 are documented as undefined, we leave the
       unmodified. */

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
15306
15307
/** Opcode 0xdd 11/2.
 * FST ST(i) - copies ST0 into ST(i) without popping.
 * (Header previously said 11/1; the 0xdd dispatcher routes reg=2 here.) */
FNIEMOP_DEF_1(iemOp_fst_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fst st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        /* Wrap the ST0 value in a result with a zero FSW so the store helper
           can reuse the common result-commit path. */
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
15330
15331
15332/** Opcode 0xdd 11/3. */
15333FNIEMOP_DEF_1(iemOp_fucom_stN_st0, uint8_t, bRm)
15334{
15335 IEMOP_MNEMONIC("fcom st0,stN");
15336 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fucom_r80_by_r80);
15337}
15338
15339
15340/** Opcode 0xdd 11/4. */
15341FNIEMOP_DEF_1(iemOp_fucomp_stN, uint8_t, bRm)
15342{
15343 IEMOP_MNEMONIC("fcomp st0,stN");
15344 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fucom_r80_by_r80);
15345}
15346
15347
/** Opcode 0xdd.
 * Escape opcode dispatcher: register forms handle FFREE/FST/FSTP/FUCOM(P),
 * memory forms handle m64real loads/stores plus FRSTOR/FNSAVE/FNSTSW. */
FNIEMOP_DEF(iemOp_EscF5)
{
    /* Remember the FPU opcode byte position for FOP reporting. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffree_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, intel behavior is that of XCHG ST(i). */
            case 2: return FNIEMOP_CALL_1(iemOp_fst_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fucom_stN_st0,bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fucomp_stN, bRm);
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m64r, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m64i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m64r, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m64r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_frstor, bRm);
            case 5: return IEMOP_RAISE_INVALID_OPCODE();
            case 6: return FNIEMOP_CALL_1(iemOp_fnsave, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstsw, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
15384
15385
/** Opcode 0xde 11/0. */
FNIEMOP_DEF_1(iemOp_faddp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("faddp stN,st0");
    /* ST(i) += ST0, then pop. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fadd_r80_by_r80);
}
15392
15393
/** Opcode 0xde 11/1.
 * (Header previously said 11/0 by copy-paste; dispatcher routes reg=1 here.) */
FNIEMOP_DEF_1(iemOp_fmulp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fmulp stN,st0");
    /* ST(i) *= ST0, then pop. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fmul_r80_by_r80);
}
15400
15401
15402/** Opcode 0xde 0xd9. */
15403FNIEMOP_DEF(iemOp_fcompp)
15404{
15405 IEMOP_MNEMONIC("fucompp st0,stN");
15406 return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fcom_r80_by_r80);
15407}
15408
15409
/** Opcode 0xde 11/4. */
FNIEMOP_DEF_1(iemOp_fsubrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubrp stN,st0");
    /* ST(i) = ST0 - ST(i), then pop. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsubr_r80_by_r80);
}
15416
15417
/** Opcode 0xde 11/5. */
FNIEMOP_DEF_1(iemOp_fsubp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubp stN,st0");
    /* ST(i) -= ST0, then pop. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsub_r80_by_r80);
}
15424
15425
/** Opcode 0xde 11/6. */
FNIEMOP_DEF_1(iemOp_fdivrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivrp stN,st0");
    /* ST(i) = ST0 / ST(i), then pop. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdivr_r80_by_r80);
}
15432
15433
/** Opcode 0xde 11/7. */
FNIEMOP_DEF_1(iemOp_fdivp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivp stN,st0");
    /* ST(i) /= ST0, then pop. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdiv_r80_by_r80);
}
15440
15441
15442/**
15443 * Common worker for FPU instructions working on ST0 and an m16i, and storing
15444 * the result in ST0.
15445 *
15446 * @param pfnAImpl Pointer to the instruction implementation (assembly).
15447 */
15448FNIEMOP_DEF_2(iemOpHlpFpu_st0_m16i, uint8_t, bRm, PFNIEMAIMPLFPUI16, pfnAImpl)
15449{
15450 IEM_MC_BEGIN(3, 3);
15451 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15452 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
15453 IEM_MC_LOCAL(int16_t, i16Val2);
15454 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
15455 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
15456 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);
15457
15458 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15459 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15460
15461 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15462 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15463 IEM_MC_FETCH_MEM_I16(i16Val2, pIemCpu->iEffSeg, GCPtrEffSrc);
15464
15465 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
15466 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi16Val2);
15467 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
15468 IEM_MC_ELSE()
15469 IEM_MC_FPU_STACK_UNDERFLOW(0);
15470 IEM_MC_ENDIF();
15471 IEM_MC_USED_FPU();
15472 IEM_MC_ADVANCE_RIP();
15473
15474 IEM_MC_END();
15475 return VINF_SUCCESS;
15476}
15477
15478
/** Opcode 0xde !11/0. */
FNIEMOP_DEF_1(iemOp_fiadd_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fiadd m16i");
    /* ST0 += (int16)[mem]. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fiadd_r80_by_i16);
}
15485
15486
/** Opcode 0xde !11/1. */
FNIEMOP_DEF_1(iemOp_fimul_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fimul m16i");
    /* ST0 *= (int16)[mem]. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fimul_r80_by_i16);
}
15493
15494
/** Opcode 0xde !11/2.
 * FICOM ST0,m16int - compares ST0 with a 16-bit signed integer operand,
 * updating only FSW. */
FNIEMOP_DEF_1(iemOp_ficom_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ficom st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15527
15528
/** Opcode 0xde !11/3.
 * FICOMP ST0,m16int - same as FICOM m16int but pops the register stack. */
FNIEMOP_DEF_1(iemOp_ficomp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ficomp st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15561
15562
/** Opcode 0xde !11/4. */
FNIEMOP_DEF_1(iemOp_fisub_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisub m16i");
    /* ST0 -= (int16)[mem]. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisub_r80_by_i16);
}
15569
15570
/** Opcode 0xde !11/5. */
FNIEMOP_DEF_1(iemOp_fisubr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisubr m16i");
    /* ST0 = (int16)[mem] - ST0 (reversed subtract). */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisubr_r80_by_i16);
}
15577
15578
15579/** Opcode 0xde !11/6. */
15580FNIEMOP_DEF_1(iemOp_fidiv_m16i, uint8_t, bRm)
15581{
15582 IEMOP_MNEMONIC("fiadd m16i");
15583 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidiv_r80_by_i16);
15584}
15585
15586
15587/** Opcode 0xde !11/7. */
15588FNIEMOP_DEF_1(iemOp_fidivr_m16i, uint8_t, bRm)
15589{
15590 IEMOP_MNEMONIC("fiadd m16i");
15591 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidivr_r80_by_i16);
15592}
15593
15594
/** Opcode 0xde.
 * Escape opcode dispatcher: register forms are the popping arithmetic
 * variants (FADDP..FDIVP, plus FCOMPP at reg=3/rm=1); memory forms operate
 * on 16-bit integer operands. */
FNIEMOP_DEF(iemOp_EscF6)
{
    /* Remember the FPU opcode byte position for FOP reporting. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_faddp_stN_st0, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmulp_stN_st0, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 3: if (bRm == 0xd9)
                        return FNIEMOP_CALL(iemOp_fcompp);
                    return IEMOP_RAISE_INVALID_OPCODE();
            case 4: return FNIEMOP_CALL_1(iemOp_fsubrp_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubp_stN_st0, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivrp_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivp_stN_st0, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m16i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m16i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m16i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m16i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m16i, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m16i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m16i, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m16i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
15633
15634
/** Opcode 0xdf 11/0.
 * Undocumented instruction, assumed to work like ffree + fincstp. */
FNIEMOP_DEF_1(iemOp_ffreep_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ffreep stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Free ST(i), then advance TOP - the "pop" half of FFREEP. */
    IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
15656
15657
/** Opcode 0xdf 0xe0.
 * FNSTSW AX - copy the FPU status word into AX (no exception check). */
FNIEMOP_DEF(iemOp_fnstsw_ax)
{
    IEMOP_MNEMONIC("fnstsw ax");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
15673
15674
15675/** Opcode 0xdf 11/5. */
15676FNIEMOP_DEF_1(iemOp_fucomip_st0_stN, uint8_t, bRm)
15677{
15678 IEMOP_MNEMONIC("fcomip st0,stN");
15679 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
15680}
15681
15682
/** Opcode 0xdf 11/6.
 * FCOMIP ST0,ST(i) - ordered compare into EFLAGS, then pop; deferred to
 * the common fcomi/fucomi C implementation. */
FNIEMOP_DEF_1(iemOp_fcomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcomip st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
}
15689
15690
/** Opcode 0xdf !11/0.
 * FILD m16int - converts the 16-bit signed integer to 80-bit real and
 * pushes it onto the register stack. */
FNIEMOP_DEF_1(iemOp_fild_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fild m16i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int16_t, i16Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val, i16Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* ST(7) must be empty for the push; otherwise it's a stack overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i16_to_r80, pFpuRes, pi16Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15722
15723
/** Opcode 0xdf !11/1.
 * FISTTP m16int - store ST0 as a 16-bit integer with truncation and pop. */
FNIEMOP_DEF_1(iemOp_fisttp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisttp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: with IM masked, store the integer indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15758
15759
15760/** Opcode 0xdf !11/2. */
15761FNIEMOP_DEF_1(iemOp_fist_m16i, uint8_t, bRm)
15762{
15763 IEMOP_MNEMONIC("fistp m16i");
15764 IEM_MC_BEGIN(3, 2);
15765 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
15766 IEM_MC_LOCAL(uint16_t, u16Fsw);
15767 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
15768 IEM_MC_ARG(int16_t *, pi16Dst, 1);
15769 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
15770
15771 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
15772 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15773 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15774 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15775
15776 IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
15777 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
15778 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
15779 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
15780 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
15781 IEM_MC_ELSE()
15782 IEM_MC_IF_FCW_IM()
15783 IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
15784 IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
15785 IEM_MC_ENDIF();
15786 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
15787 IEM_MC_ENDIF();
15788 IEM_MC_USED_FPU();
15789 IEM_MC_ADVANCE_RIP();
15790
15791 IEM_MC_END();
15792 return VINF_SUCCESS;
15793}
15794
15795
/** Opcode 0xdf !11/3.
 * FISTP m16int - store ST0 as a 16-bit integer, then pop. */
FNIEMOP_DEF_1(iemOp_fistp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fistp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: with IM masked, store the integer indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15830
15831
/** Opcode 0xdf !11/4.
 * FBLD m80bcd - load an 80-bit packed BCD value onto the FPU stack;
 * currently a decoder stub, not implemented. */
FNIEMOP_STUB_1(iemOp_fbld_m80d, uint8_t, bRm);
15834
15835
/** Opcode 0xdf !11/5.
 * FILD m64i: load a 64-bit signed integer from memory, convert it to
 * 80-bit real and push it onto the FPU stack.  If the register that would
 * become the new ST(0) (i.e. ST(7)) is not free, a stack overflow is
 * signalled instead of pushing. */
FNIEMOP_DEF_1(iemOp_fild_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fild m64i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int64_t, i64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int64_t const *, pi64Val, i64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Fetched before touching FPU state so a #PF leaves the FPU unchanged. */
    IEM_MC_FETCH_MEM_I64(i64Val, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i64_to_r80, pFpuRes, pi64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15867
15868
/** Opcode 0xdf !11/6.
 * FBSTP m80bcd - store ST(0) as 80-bit packed BCD and pop;
 * currently a decoder stub, not implemented. */
FNIEMOP_STUB_1(iemOp_fbstp_m80d, uint8_t, bRm);
15871
15872
/** Opcode 0xdf !11/7.
 * FISTP m64i: convert ST(0) to a 64-bit signed integer, store it at the
 * effective address and pop the register stack.  When ST(0) is empty:
 * with FCW.IM masked the 64-bit integer indefinite (INT64_MIN) is stored
 * and stack underflow is signalled; otherwise only the underflow is
 * signalled and nothing is written. */
FNIEMOP_DEF_1(iemOp_fistp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fistp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* The destination is mapped for writing before the FPU operation, so a
       failing memory access faults before any FPU state is changed. */
    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15907
15908
/** Opcode 0xdf.
 * FPU escape 0xdf: dispatches on the ModR/M byte.  Mod=11 selects the
 * register forms (FFREEP, FNSTSW AX, FUCOMIP, FCOMIP and some reserved
 * aliases); any other mod selects the memory forms (FILD/FIST(P)/FISTTP
 * 16-bit, FBLD/FBSTP, FILD/FISTP 64-bit). */
FNIEMOP_DEF(iemOp_EscF7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffreep_stN, bRm); /* ffree + pop afterwards, since forever according to AMD. */
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, behaves like FXCH ST(i) on intel. */
            case 2: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
            case 4: if (bRm == 0xe0) /* only /4 with rm=000 (0xe0) is valid: FNSTSW AX */
                        return FNIEMOP_CALL(iemOp_fnstsw_ax);
                    return IEMOP_RAISE_INVALID_OPCODE();
            case 5: return FNIEMOP_CALL_1(iemOp_fucomip_st0_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fcomip_st0_stN, bRm);
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fild_m16i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m16i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fist_m16i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fistp_m16i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fbld_m80d, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fild_m64i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fbstp_m80d, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fistp_m64i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
15946
15947
/** Opcode 0xe0.
 * LOOPNE/LOOPNZ Jb: decrement the counter register and take the short
 * branch when the counter is non-zero and ZF is clear.  The counter width
 * (CX/ECX/RCX) follows the effective ADDRESS size, not the operand size. */
FNIEMOP_DEF(iemOp_loopne_Jb)
{
    IEMOP_MNEMONIC("loopne Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
15994
15995
/** Opcode 0xe1.
 * LOOPE/LOOPZ Jb: decrement the counter register and take the short branch
 * when the counter is non-zero and ZF is set.  The counter width
 * (CX/ECX/RCX) follows the effective ADDRESS size, not the operand size. */
FNIEMOP_DEF(iemOp_loope_Jb)
{
    IEMOP_MNEMONIC("loope Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16042
16043
/** Opcode 0xe2.
 * LOOP Jb: decrement the counter register and take the short branch while
 * it is non-zero.  The counter width (CX/ECX/RCX) follows the effective
 * ADDRESS size.  A "loop $" that branches back to its own first byte
 * (i8Imm == -offOpcode) can only terminate when the counter reaches zero,
 * so that case is short-circuited by clearing the counter and advancing. */
FNIEMOP_DEF(iemOp_loop_Jb)
{
    IEMOP_MNEMONIC("loop Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /** @todo Check out the #GP case if EIP < CS.Base or EIP > CS.Limit when
     * using the 32-bit operand size override.  How can that be restarted?  See
     * weird pseudo code in intel manual. */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            if (-(int8_t)pIemCpu->offOpcode != i8Imm) /* not a self-loop */
            {
                IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
                IEM_MC_IF_CX_IS_NZ() {
                    IEM_MC_REL_JMP_S8(i8Imm);
                } IEM_MC_ELSE() {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ENDIF();
            }
            else
            {
                /* "loop $" fast path: spin down the counter in one go. */
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP();
            }
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            if (-(int8_t)pIemCpu->offOpcode != i8Imm) /* not a self-loop */
            {
                IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
                IEM_MC_IF_ECX_IS_NZ() {
                    IEM_MC_REL_JMP_S8(i8Imm);
                } IEM_MC_ELSE() {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ENDIF();
            }
            else
            {
                /* "loop $" fast path: spin down the counter in one go. */
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP();
            }
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            if (-(int8_t)pIemCpu->offOpcode != i8Imm) /* not a self-loop */
            {
                IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
                IEM_MC_IF_RCX_IS_NZ() {
                    IEM_MC_REL_JMP_S8(i8Imm);
                } IEM_MC_ELSE() {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ENDIF();
            }
            else
            {
                /* "loop $" fast path: spin down the counter in one go. */
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP();
            }
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16117
16118
/** Opcode 0xe3.
 * JCXZ/JECXZ/JRCXZ Jb: take the short branch when the counter register is
 * ZERO (note: the IF tests non-zero, so the jump sits in the ELSE arm).
 * The counter width follows the effective address size.  No flags read. */
FNIEMOP_DEF(iemOp_jecxz_Jb)
{
    IEMOP_MNEMONIC("jecxz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_CX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_ECX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_RCX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16162
16163
16164/** Opcode 0xe4 */
16165FNIEMOP_DEF(iemOp_in_AL_Ib)
16166{
16167 IEMOP_MNEMONIC("in eAX,Ib");
16168 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
16169 IEMOP_HLP_NO_LOCK_PREFIX();
16170 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, 1);
16171}
16172
16173
/** Opcode 0xe5.
 * IN eAX,Ib: read a word/dword from the immediate 8-bit I/O port into
 * AX/EAX, sized by the effective operand size (16-bit -> 2 bytes,
 * otherwise 4). */
FNIEMOP_DEF(iemOp_in_eAX_Ib)
{
    IEMOP_MNEMONIC("in eAX,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, pIemCpu->enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
16182
16183
/** Opcode 0xe6.
 * OUT Ib,AL: write AL to the immediate 8-bit I/O port.  Deferred to the C
 * implementation (iemCImpl_out) with cbReg=1. */
FNIEMOP_DEF(iemOp_out_Ib_AL)
{
    IEMOP_MNEMONIC("out Ib,AL");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, 1);
}
16192
16193
/** Opcode 0xe7.
 * OUT Ib,eAX: write AX/EAX to the immediate 8-bit I/O port, sized by the
 * effective operand size (16-bit -> 2 bytes, otherwise 4). */
FNIEMOP_DEF(iemOp_out_Ib_eAX)
{
    IEMOP_MNEMONIC("out Ib,eAX");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, pIemCpu->enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
16202
16203
/** Opcode 0xe8.
 * CALL Jv: near relative call.  The immediate is sign-interpreted at the
 * effective operand size; in 64-bit mode the default operand size is
 * forced to 64-bit and the displacement is a sign-extended 32-bit value.
 * Deferred to the size-specific C implementations. */
FNIEMOP_DEF(iemOp_call_Jv)
{
    IEMOP_MNEMONIC("call Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_16, (int16_t)u16Imm);
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_32, (int32_t)u32Imm);
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_64, u64Imm);
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16232
16233
/** Opcode 0xe9.
 * JMP Jv: near relative jump.  16-bit operand size uses a 16-bit
 * displacement; 32-bit and 64-bit both use a sign-extended 32-bit
 * displacement (hence the shared case label). */
FNIEMOP_DEF(iemOp_jmp_Jv)
{
    IEMOP_MNEMONIC("jmp Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
            IEM_MC_BEGIN(0, 0);
            IEM_MC_REL_JMP_S16(i16Imm);
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT: /* 64-bit mode also takes a 32-bit displacement. */
        case IEMMODE_32BIT:
        {
            int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
            IEM_MC_BEGIN(0, 0);
            IEM_MC_REL_JMP_S32(i32Imm);
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16263
16264
/** Opcode 0xea.
 * JMP Ap: direct far jump to an immediate selector:offset pair.  Invalid
 * in 64-bit mode.  The offset is decoded first (16 or 32 bits depending on
 * operand size), then the 16-bit selector, matching the instruction
 * encoding; the heavy lifting is done by iemCImpl_FarJmp. */
FNIEMOP_DEF(iemOp_jmp_Ap)
{
    IEMOP_MNEMONIC("jmp Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    uint32_t offSeg;
    if (pIemCpu->enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&offSeg);
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
    uint16_t uSel; IEM_OPCODE_GET_NEXT_U16(&uSel);
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_FarJmp, uSel, offSeg, pIemCpu->enmEffOpSize);
}
16281
16282
/** Opcode 0xeb.
 * JMP Jb: short relative jump with a sign-extended 8-bit displacement. */
FNIEMOP_DEF(iemOp_jmp_Jb)
{
    IEMOP_MNEMONIC("jmp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_REL_JMP_S8(i8Imm);
    IEM_MC_END();
    return VINF_SUCCESS;
}
16296
16297
/** Opcode 0xec.
 * IN AL,DX: read one byte from the I/O port addressed by DX into AL. */
FNIEMOP_DEF(iemOp_in_AL_DX)
{
    IEMOP_MNEMONIC("in AL,DX");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, 1);
}
16305
16306
/** Opcode 0xed.
 * IN eAX,DX: read a word/dword from the I/O port addressed by DX into
 * AX/EAX, sized by the effective operand size.
 * NOTE(review): the function name lacks the "in_" prefix used by its
 * siblings (iemOp_in_AL_DX etc.); kept as-is since the opcode table
 * references it by this name. */
FNIEMOP_DEF(iemOp_eAX_DX)
{
    IEMOP_MNEMONIC("in eAX,DX");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, pIemCpu->enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
16314
16315
/** Opcode 0xee.
 * OUT DX,AL: write AL to the I/O port addressed by DX. */
FNIEMOP_DEF(iemOp_out_DX_AL)
{
    IEMOP_MNEMONIC("out DX,AL");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, 1);
}
16323
16324
/** Opcode 0xef.
 * OUT DX,eAX: write AX/EAX to the I/O port addressed by DX, sized by the
 * effective operand size. */
FNIEMOP_DEF(iemOp_out_DX_eAX)
{
    IEMOP_MNEMONIC("out DX,eAX");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, pIemCpu->enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
16332
16333
/** Opcode 0xf0.
 * LOCK prefix: records the prefix in fPrefixes, then fetches the next
 * opcode byte and re-dispatches through the one-byte opcode table.
 * Whether LOCK is actually legal is decided by the decoded instruction. */
FNIEMOP_DEF(iemOp_lock)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("lock");
    pIemCpu->fPrefixes |= IEM_OP_PRF_LOCK;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
16343
16344
/** Opcode 0xf1.
 * INT1/ICEBP: raises a \#DB via the common software interrupt C
 * implementation, with fIsBpInstr=false so it is not treated as INT3. */
FNIEMOP_DEF(iemOp_int_1)
{
    IEMOP_MNEMONIC("int1"); /* icebp */
    IEMOP_HLP_MIN_386(); /** @todo does not generate #UD on 286, or so they say... */
    /** @todo testcase! */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_DB, false /*fIsBpInstr*/);
}
16353
16354
/** Opcode 0xf2.
 * REPNE/REPNZ prefix: clears any earlier REPE flag, records REPNZ, then
 * fetches the next opcode byte and re-dispatches through the one-byte
 * opcode table. */
FNIEMOP_DEF(iemOp_repne)
{
    /* This overrides any previous REPE prefix. */
    pIemCpu->fPrefixes &= ~IEM_OP_PRF_REPZ;
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repne");
    pIemCpu->fPrefixes |= IEM_OP_PRF_REPNZ;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
16366
16367
/** Opcode 0xf3.
 * REP/REPE/REPZ prefix: clears any earlier REPNE flag, records REPZ, then
 * fetches the next opcode byte and re-dispatches through the one-byte
 * opcode table. */
FNIEMOP_DEF(iemOp_repe)
{
    /* This overrides any previous REPNE prefix. */
    pIemCpu->fPrefixes &= ~IEM_OP_PRF_REPNZ;
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repe");
    pIemCpu->fPrefixes |= IEM_OP_PRF_REPZ;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
16379
16380
/** Opcode 0xf4.
 * HLT: deferred to iemCImpl_hlt.  The #if'd block is a disabled ("&& 0")
 * developer hack that would downgrade the target CPU to 286 emulation on
 * the first HLT in low CS selectors. */
FNIEMOP_DEF(iemOp_hlt)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
#if IEM_CFG_TARGET_CPU == IEMTARGETCPU_DYNAMIC && 0
    if (   pIemCpu->uTargetCpu == IEMTARGETCPU_CURRENT
        && pIemCpu->CTX_SUFF(pCtx)->cs.Sel <= 1000)
    {
        pIemCpu->uTargetCpu = IEMTARGETCPU_286;
        LogAlways(("\niemOp_hlt: Enabled CPU restrictions!\n\n"));
    }
#endif
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_hlt);
}
16395
16396
/** Opcode 0xf5.
 * CMC: complement (toggle) the carry flag; no other flags touched here. */
FNIEMOP_DEF(iemOp_cmc)
{
    IEMOP_MNEMONIC("cmc");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_FLIP_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
16408
16409
16410/**
16411 * Common implementation of 'inc/dec/not/neg Eb'.
16412 *
16413 * @param bRm The RM byte.
16414 * @param pImpl The instruction implementation.
16415 */
16416FNIEMOP_DEF_2(iemOpCommonUnaryEb, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
16417{
16418 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
16419 {
16420 /* register access */
16421 IEM_MC_BEGIN(2, 0);
16422 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
16423 IEM_MC_ARG(uint32_t *, pEFlags, 1);
16424 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
16425 IEM_MC_REF_EFLAGS(pEFlags);
16426 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
16427 IEM_MC_ADVANCE_RIP();
16428 IEM_MC_END();
16429 }
16430 else
16431 {
16432 /* memory access. */
16433 IEM_MC_BEGIN(2, 2);
16434 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
16435 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
16436 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
16437
16438 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
16439 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
16440 IEM_MC_FETCH_EFLAGS(EFlags);
16441 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
16442 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
16443 else
16444 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU8, pu8Dst, pEFlags);
16445
16446 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
16447 IEM_MC_COMMIT_EFLAGS(EFlags);
16448 IEM_MC_ADVANCE_RIP();
16449 IEM_MC_END();
16450 }
16451 return VINF_SUCCESS;
16452}
16453
16454
16455/**
16456 * Common implementation of 'inc/dec/not/neg Ev'.
16457 *
16458 * @param bRm The RM byte.
16459 * @param pImpl The instruction implementation.
16460 */
16461FNIEMOP_DEF_2(iemOpCommonUnaryEv, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
16462{
16463 /* Registers are handled by a common worker. */
16464 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
16465 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, pImpl, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
16466
16467 /* Memory we do here. */
16468 switch (pIemCpu->enmEffOpSize)
16469 {
16470 case IEMMODE_16BIT:
16471 IEM_MC_BEGIN(2, 2);
16472 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
16473 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
16474 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
16475
16476 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
16477 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
16478 IEM_MC_FETCH_EFLAGS(EFlags);
16479 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
16480 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
16481 else
16482 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU16, pu16Dst, pEFlags);
16483
16484 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
16485 IEM_MC_COMMIT_EFLAGS(EFlags);
16486 IEM_MC_ADVANCE_RIP();
16487 IEM_MC_END();
16488 return VINF_SUCCESS;
16489
16490 case IEMMODE_32BIT:
16491 IEM_MC_BEGIN(2, 2);
16492 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
16493 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
16494 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
16495
16496 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
16497 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
16498 IEM_MC_FETCH_EFLAGS(EFlags);
16499 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
16500 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
16501 else
16502 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU32, pu32Dst, pEFlags);
16503
16504 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
16505 IEM_MC_COMMIT_EFLAGS(EFlags);
16506 IEM_MC_ADVANCE_RIP();
16507 IEM_MC_END();
16508 return VINF_SUCCESS;
16509
16510 case IEMMODE_64BIT:
16511 IEM_MC_BEGIN(2, 2);
16512 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
16513 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
16514 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
16515
16516 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
16517 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
16518 IEM_MC_FETCH_EFLAGS(EFlags);
16519 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
16520 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
16521 else
16522 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU64, pu64Dst, pEFlags);
16523
16524 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
16525 IEM_MC_COMMIT_EFLAGS(EFlags);
16526 IEM_MC_ADVANCE_RIP();
16527 IEM_MC_END();
16528 return VINF_SUCCESS;
16529
16530 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16531 }
16532}
16533
16534
/** Opcode 0xf6 /0.
 * TEST Eb,Ib: AND the 8-bit operand with an 8-bit immediate and update
 * flags only - the destination is never written back (memory is mapped
 * read-only). */
FNIEMOP_DEF_1(iemOp_grp3_test_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC("test Eb,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/u8Imm, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, u8Src, 1);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        /* The trailing '1' tells the effective-address calculation that one
           immediate byte still follows the ModR/M bytes. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_ASSIGN(u8Src, u8Imm);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_R, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_R);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
16582
16583
/** Opcode 0xf7 /0.
 * TEST Ev,Iv: AND the 16/32/64-bit operand with an immediate of the
 * effective operand size (sign-extended 32-bit in the 64-bit case) and
 * update flags only - the destination is never written back (memory is
 * mapped read-only). */
FNIEMOP_DEF_1(iemOp_grp3_test_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC("test Ev,Iv");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/u16Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/u32Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
                /* No clearing the high dword here - test doesn't write back the result. */
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/u64Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access.  The third IEM_MC_CALC_RM_EFF_ADDR argument is the
           number of immediate bytes that still follow the ModR/M bytes. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_R, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_R, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_R, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
16720
16721
16722/** Opcode 0xf6 /4, /5, /6 and /7. */
16723FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEb, uint8_t, bRm, PFNIEMAIMPLMULDIVU8, pfnU8)
16724{
16725 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
16726
16727 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
16728 {
16729 /* register access */
16730 IEMOP_HLP_NO_LOCK_PREFIX();
16731 IEM_MC_BEGIN(3, 1);
16732 IEM_MC_ARG(uint16_t *, pu16AX, 0);
16733 IEM_MC_ARG(uint8_t, u8Value, 1);
16734 IEM_MC_ARG(uint32_t *, pEFlags, 2);
16735 IEM_MC_LOCAL(int32_t, rc);
16736
16737 IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
16738 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
16739 IEM_MC_REF_EFLAGS(pEFlags);
16740 IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
16741 IEM_MC_IF_LOCAL_IS_Z(rc) {
16742 IEM_MC_ADVANCE_RIP();
16743 } IEM_MC_ELSE() {
16744 IEM_MC_RAISE_DIVIDE_ERROR();
16745 } IEM_MC_ENDIF();
16746
16747 IEM_MC_END();
16748 }
16749 else
16750 {
16751 /* memory access. */
16752 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
16753
16754 IEM_MC_BEGIN(3, 2);
16755 IEM_MC_ARG(uint16_t *, pu16AX, 0);
16756 IEM_MC_ARG(uint8_t, u8Value, 1);
16757 IEM_MC_ARG(uint32_t *, pEFlags, 2);
16758 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
16759 IEM_MC_LOCAL(int32_t, rc);
16760
16761 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
16762 IEM_MC_FETCH_MEM_U8(u8Value, pIemCpu->iEffSeg, GCPtrEffDst);
16763 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
16764 IEM_MC_REF_EFLAGS(pEFlags);
16765 IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
16766 IEM_MC_IF_LOCAL_IS_Z(rc) {
16767 IEM_MC_ADVANCE_RIP();
16768 } IEM_MC_ELSE() {
16769 IEM_MC_RAISE_DIVIDE_ERROR();
16770 } IEM_MC_ENDIF();
16771
16772 IEM_MC_END();
16773 }
16774 return VINF_SUCCESS;
16775}
16776
16777
/** Opcode 0xf7 /4, /5, /6 and /7.
 *
 * Common worker for the word/dword/qword forms of MUL, IMUL, DIV and IDIV.
 * The explicit operand comes from r/m (register or memory); the implicit
 * operands are rAX and rDX (double-width product, or dividend in / quotient
 * and remainder out).  A zero status from the assembly worker advances RIP;
 * a non-zero status raises \#DE (divide by zero or quotient overflow).
 *
 * @param   bRm     The ModR/M byte, already fetched by the caller.
 * @param   pImpl   Table with the 16/32/64-bit assembly worker functions.
 */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEv, uint8_t, bRm, PCIEMOPMULDIVSIZES, pImpl)
{
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint16_t *, pu16AX, 0);
                IEM_MC_ARG(uint16_t *, pu16DX, 1);
                IEM_MC_ARG(uint16_t, u16Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint32_t *, pu32AX, 0);
                IEM_MC_ARG(uint32_t *, pu32DX, 1);
                IEM_MC_ARG(uint32_t, u32Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    /* 32-bit GPR writes clear the high halves of RAX/RDX. */
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint64_t *, pu64AX, 0);
                IEM_MC_ARG(uint64_t *, pu64DX, 1);
                IEM_MC_ARG(uint64_t, u64Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint16_t *, pu16AX, 0);
                IEM_MC_ARG(uint16_t *, pu16DX, 1);
                IEM_MC_ARG(uint16_t, u16Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint32_t *, pu32AX, 0);
                IEM_MC_ARG(uint32_t *, pu32DX, 1);
                IEM_MC_ARG(uint32_t, u32Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    /* 32-bit GPR writes clear the high halves of RAX/RDX. */
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint64_t *, pu64AX, 0);
                IEM_MC_ARG(uint64_t *, pu64DX, 1);
                IEM_MC_ARG(uint64_t, u64Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
16961
/** Opcode 0xf6.
 * Group 3, byte-sized forms: /0 TEST, /1 \#UD, /2 NOT, /3 NEG, /4 MUL,
 * /5 IMUL, /6 DIV, /7 IDIV.  Dispatches on the reg field of ModR/M. */
FNIEMOP_DEF(iemOp_Grp3_Eb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
        case 1:
/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2:
            IEMOP_MNEMONIC("not Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_not);
        case 3:
            IEMOP_MNEMONIC("neg Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_neg);
        case 4:
            IEMOP_MNEMONIC("mul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_mul_u8);
        case 5:
            IEMOP_MNEMONIC("imul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_imul_u8);
        case 6:
            IEMOP_MNEMONIC("div Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_div_u8);
        case 7:
            IEMOP_MNEMONIC("idiv Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_idiv_u8);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16998
16999
/** Opcode 0xf7.
 * Group 3, operand-sized forms: /0 TEST, /1 \#UD, /2 NOT, /3 NEG, /4 MUL,
 * /5 IMUL, /6 DIV, /7 IDIV.  Dispatches on the reg field of ModR/M. */
FNIEMOP_DEF(iemOp_Grp3_Ev)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
        case 1:
/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2:
            IEMOP_MNEMONIC("not Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_not);
        case 3:
            IEMOP_MNEMONIC("neg Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_neg);
        case 4:
            IEMOP_MNEMONIC("mul Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_mul);
        case 5:
            IEMOP_MNEMONIC("imul Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_imul);
        case 6:
            IEMOP_MNEMONIC("div Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_div);
        case 7:
            IEMOP_MNEMONIC("idiv Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_idiv);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
17036
17037
/** Opcode 0xf8 - CLC: clears EFLAGS.CF, leaving all other flags untouched. */
FNIEMOP_DEF(iemOp_clc)
{
    IEMOP_MNEMONIC("clc");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
17049
17050
/** Opcode 0xf9 - STC: sets EFLAGS.CF, leaving all other flags untouched. */
FNIEMOP_DEF(iemOp_stc)
{
    IEMOP_MNEMONIC("stc");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_SET_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
17062
17063
/** Opcode 0xfa - CLI: deferred to a C implementation (privilege/IOPL
 *  checking is done in iemCImpl_cli). */
FNIEMOP_DEF(iemOp_cli)
{
    IEMOP_MNEMONIC("cli");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cli);
}
17071
17072
/** Opcode 0xfb - STI: deferred to a C implementation (privilege/IOPL
 *  checking is done in iemCImpl_sti). */
FNIEMOP_DEF(iemOp_sti)
{
    IEMOP_MNEMONIC("sti");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sti);
}
17079
17080
/** Opcode 0xfc - CLD: clears EFLAGS.DF, leaving all other flags untouched. */
FNIEMOP_DEF(iemOp_cld)
{
    IEMOP_MNEMONIC("cld");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
17092
17093
/** Opcode 0xfd - STD: sets EFLAGS.DF, leaving all other flags untouched. */
FNIEMOP_DEF(iemOp_std)
{
    IEMOP_MNEMONIC("std");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_SET_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
17105
17106
17107/** Opcode 0xfe. */
17108FNIEMOP_DEF(iemOp_Grp4)
17109{
17110 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
17111 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
17112 {
17113 case 0:
17114 IEMOP_MNEMONIC("inc Ev");
17115 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_inc);
17116 case 1:
17117 IEMOP_MNEMONIC("dec Ev");
17118 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_dec);
17119 default:
17120 IEMOP_MNEMONIC("grp4-ud");
17121 return IEMOP_RAISE_INVALID_OPCODE();
17122 }
17123}
17124
17125
/**
 * Opcode 0xff /2 - near indirect CALL, target taken from a register or from
 * memory; the actual call (stack push + RIP update) is done by the
 * operand-size specific iemCImpl_call_16/32/64 workers.
 *
 * @param   bRm     The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_calln_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC("calln Ev");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo Too early? */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* The new RIP is taken from a register. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from a memory location. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEM_MC_FETCH_MEM_U16(u16Target, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEM_MC_FETCH_MEM_U32(u32Target, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEM_MC_FETCH_MEM_U64(u64Target, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
17207
17208typedef IEM_CIMPL_DECL_TYPE_3(FNIEMCIMPLFARBRANCH, uint16_t, uSel, uint64_t, offSeg, IEMMODE, enmOpSize);
17209
17210FNIEMOP_DEF_2(iemOpHlp_Grp5_far_Ep, uint8_t, bRm, FNIEMCIMPLFARBRANCH *, pfnCImpl)
17211{
17212 /* Registers? How?? */
17213 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
17214 return IEMOP_RAISE_INVALID_OPCODE(); /* callf eax is not legal */
17215
17216 /* Far pointer loaded from memory. */
17217 switch (pIemCpu->enmEffOpSize)
17218 {
17219 case IEMMODE_16BIT:
17220 IEM_MC_BEGIN(3, 1);
17221 IEM_MC_ARG(uint16_t, u16Sel, 0);
17222 IEM_MC_ARG(uint16_t, offSeg, 1);
17223 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
17224 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17225 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17226 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17227 IEM_MC_FETCH_MEM_U16(offSeg, pIemCpu->iEffSeg, GCPtrEffSrc);
17228 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc, 2);
17229 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
17230 IEM_MC_END();
17231 return VINF_SUCCESS;
17232
17233 case IEMMODE_64BIT:
17234 /** @todo testcase: AMD does not seem to believe in the case (see bs-cpu-xcpt-1)
17235 * and will apparently ignore REX.W, at least for the jmp far qword [rsp]
17236 * and call far qword [rsp] encodings. */
17237 if (!IEM_IS_GUEST_CPU_AMD(pIemCpu))
17238 {
17239 IEM_MC_BEGIN(3, 1);
17240 IEM_MC_ARG(uint16_t, u16Sel, 0);
17241 IEM_MC_ARG(uint64_t, offSeg, 1);
17242 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
17243 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17244 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17245 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17246 IEM_MC_FETCH_MEM_U64(offSeg, pIemCpu->iEffSeg, GCPtrEffSrc);
17247 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc, 8);
17248 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
17249 IEM_MC_END();
17250 return VINF_SUCCESS;
17251 }
17252 /* AMD falls thru. */
17253
17254 case IEMMODE_32BIT:
17255 IEM_MC_BEGIN(3, 1);
17256 IEM_MC_ARG(uint16_t, u16Sel, 0);
17257 IEM_MC_ARG(uint32_t, offSeg, 1);
17258 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_32BIT, 2);
17259 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17260 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17261 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17262 IEM_MC_FETCH_MEM_U32(offSeg, pIemCpu->iEffSeg, GCPtrEffSrc);
17263 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc, 4);
17264 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
17265 IEM_MC_END();
17266 return VINF_SUCCESS;
17267
17268 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17269 }
17270}
17271
17272
/**
 * Opcode 0xff /3 - far indirect CALL via the common far-pointer worker.
 * @param   bRm     The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_callf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC("callf Ep");
    return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_callf);
}
17282
17283
/**
 * Opcode 0xff /4 - near indirect JMP; the target RIP is read from a register
 * or from memory and set directly (no stack interaction).
 *
 * @param   bRm     The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpn_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC("jmpn Ev");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo Too early? */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* The new RIP is taken from a register. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_SET_RIP_U16(u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_SET_RIP_U32(u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_SET_RIP_U64(u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from a memory location. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEM_MC_FETCH_MEM_U16(u16Target, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U16(u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEM_MC_FETCH_MEM_U32(u32Target, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U32(u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEM_MC_FETCH_MEM_U64(u64Target, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U64(u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
17365
17366
/**
 * Opcode 0xff /5 - far indirect JMP via the common far-pointer worker.
 * @param   bRm     The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC("jmpf Ep");
    return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_FarJmp);
}
17376
17377
/**
 * Opcode 0xff /6 - PUSH r/m.  Register forms go through the common push
 * worker; memory forms fetch the value here and push it.
 *
 * @param   bRm     The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_push_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC("push Ev");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo Too early? */

    /* Registers are handled by a common worker. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(iemOpCommonPushGReg, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);

    /* Memory we do here. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint16_t, u16Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEM_MC_FETCH_MEM_U16(u16Src, pIemCpu->iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U16(u16Src);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEM_MC_FETCH_MEM_U32(u32Src, pIemCpu->iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U32(u32Src);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEM_MC_FETCH_MEM_U64(u64Src, pIemCpu->iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U64(u64Src);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
17431
17432
/** Opcode 0xff.
 * Group 5: /0 INC Ev, /1 DEC Ev, /2 near CALL, /3 far CALL, /4 near JMP,
 * /5 far JMP, /6 PUSH Ev, /7 \#UD.  Dispatches on the reg field of ModR/M. */
FNIEMOP_DEF(iemOp_Grp5)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            IEMOP_MNEMONIC("inc Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_inc);
        case 1:
            IEMOP_MNEMONIC("dec Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_dec);
        case 2:
            return FNIEMOP_CALL_1(iemOp_Grp5_calln_Ev, bRm);
        case 3:
            return FNIEMOP_CALL_1(iemOp_Grp5_callf_Ep, bRm);
        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp5_jmpn_Ev, bRm);
        case 5:
            return FNIEMOP_CALL_1(iemOp_Grp5_jmpf_Ep, bRm);
        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp5_push_Ev, bRm);
        case 7:
            IEMOP_MNEMONIC("grp5-ud");
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    AssertFailedReturn(VERR_IEM_IPE_3);
}
17461
17462
17463
/**
 * The one-byte opcode dispatch table, indexed by the opcode byte.
 * Prefix bytes (segment overrides, operand/address size, LOCK, REP) also
 * live here as decoder functions; group opcodes (0x80-0x83, 0xc0/0xc1,
 * 0xd0-0xd3, 0xf6/0xf7, 0xfe, 0xff) dispatch further on ModR/M.reg.
 */
const PFNIEMOP g_apfnOneByteMap[256] =
{
    /* 0x00 */ iemOp_add_Eb_Gb, iemOp_add_Ev_Gv, iemOp_add_Gb_Eb, iemOp_add_Gv_Ev,
    /* 0x04 */ iemOp_add_Al_Ib, iemOp_add_eAX_Iz, iemOp_push_ES, iemOp_pop_ES,
    /* 0x08 */ iemOp_or_Eb_Gb, iemOp_or_Ev_Gv, iemOp_or_Gb_Eb, iemOp_or_Gv_Ev,
    /* 0x0c */ iemOp_or_Al_Ib, iemOp_or_eAX_Iz, iemOp_push_CS, iemOp_2byteEscape,
    /* 0x10 */ iemOp_adc_Eb_Gb, iemOp_adc_Ev_Gv, iemOp_adc_Gb_Eb, iemOp_adc_Gv_Ev,
    /* 0x14 */ iemOp_adc_Al_Ib, iemOp_adc_eAX_Iz, iemOp_push_SS, iemOp_pop_SS,
    /* 0x18 */ iemOp_sbb_Eb_Gb, iemOp_sbb_Ev_Gv, iemOp_sbb_Gb_Eb, iemOp_sbb_Gv_Ev,
    /* 0x1c */ iemOp_sbb_Al_Ib, iemOp_sbb_eAX_Iz, iemOp_push_DS, iemOp_pop_DS,
    /* 0x20 */ iemOp_and_Eb_Gb, iemOp_and_Ev_Gv, iemOp_and_Gb_Eb, iemOp_and_Gv_Ev,
    /* 0x24 */ iemOp_and_Al_Ib, iemOp_and_eAX_Iz, iemOp_seg_ES, iemOp_daa,
    /* 0x28 */ iemOp_sub_Eb_Gb, iemOp_sub_Ev_Gv, iemOp_sub_Gb_Eb, iemOp_sub_Gv_Ev,
    /* 0x2c */ iemOp_sub_Al_Ib, iemOp_sub_eAX_Iz, iemOp_seg_CS, iemOp_das,
    /* 0x30 */ iemOp_xor_Eb_Gb, iemOp_xor_Ev_Gv, iemOp_xor_Gb_Eb, iemOp_xor_Gv_Ev,
    /* 0x34 */ iemOp_xor_Al_Ib, iemOp_xor_eAX_Iz, iemOp_seg_SS, iemOp_aaa,
    /* 0x38 */ iemOp_cmp_Eb_Gb, iemOp_cmp_Ev_Gv, iemOp_cmp_Gb_Eb, iemOp_cmp_Gv_Ev,
    /* 0x3c */ iemOp_cmp_Al_Ib, iemOp_cmp_eAX_Iz, iemOp_seg_DS, iemOp_aas,
    /* 0x40 */ iemOp_inc_eAX, iemOp_inc_eCX, iemOp_inc_eDX, iemOp_inc_eBX,
    /* 0x44 */ iemOp_inc_eSP, iemOp_inc_eBP, iemOp_inc_eSI, iemOp_inc_eDI,
    /* 0x48 */ iemOp_dec_eAX, iemOp_dec_eCX, iemOp_dec_eDX, iemOp_dec_eBX,
    /* 0x4c */ iemOp_dec_eSP, iemOp_dec_eBP, iemOp_dec_eSI, iemOp_dec_eDI,
    /* 0x50 */ iemOp_push_eAX, iemOp_push_eCX, iemOp_push_eDX, iemOp_push_eBX,
    /* 0x54 */ iemOp_push_eSP, iemOp_push_eBP, iemOp_push_eSI, iemOp_push_eDI,
    /* 0x58 */ iemOp_pop_eAX, iemOp_pop_eCX, iemOp_pop_eDX, iemOp_pop_eBX,
    /* 0x5c */ iemOp_pop_eSP, iemOp_pop_eBP, iemOp_pop_eSI, iemOp_pop_eDI,
    /* 0x60 */ iemOp_pusha, iemOp_popa, iemOp_bound_Gv_Ma_evex, iemOp_arpl_Ew_Gw_movsx_Gv_Ev,
    /* 0x64 */ iemOp_seg_FS, iemOp_seg_GS, iemOp_op_size, iemOp_addr_size,
    /* 0x68 */ iemOp_push_Iz, iemOp_imul_Gv_Ev_Iz, iemOp_push_Ib, iemOp_imul_Gv_Ev_Ib,
    /* 0x6c */ iemOp_insb_Yb_DX, iemOp_inswd_Yv_DX, iemOp_outsb_Yb_DX, iemOp_outswd_Yv_DX,
    /* 0x70 */ iemOp_jo_Jb, iemOp_jno_Jb, iemOp_jc_Jb, iemOp_jnc_Jb,
    /* 0x74 */ iemOp_je_Jb, iemOp_jne_Jb, iemOp_jbe_Jb, iemOp_jnbe_Jb,
    /* 0x78 */ iemOp_js_Jb, iemOp_jns_Jb, iemOp_jp_Jb, iemOp_jnp_Jb,
    /* 0x7c */ iemOp_jl_Jb, iemOp_jnl_Jb, iemOp_jle_Jb, iemOp_jnle_Jb,
    /* 0x80 */ iemOp_Grp1_Eb_Ib_80, iemOp_Grp1_Ev_Iz, iemOp_Grp1_Eb_Ib_82, iemOp_Grp1_Ev_Ib,
    /* 0x84 */ iemOp_test_Eb_Gb, iemOp_test_Ev_Gv, iemOp_xchg_Eb_Gb, iemOp_xchg_Ev_Gv,
    /* 0x88 */ iemOp_mov_Eb_Gb, iemOp_mov_Ev_Gv, iemOp_mov_Gb_Eb, iemOp_mov_Gv_Ev,
    /* 0x8c */ iemOp_mov_Ev_Sw, iemOp_lea_Gv_M, iemOp_mov_Sw_Ev, iemOp_Grp1A,
    /* 0x90 */ iemOp_nop, iemOp_xchg_eCX_eAX, iemOp_xchg_eDX_eAX, iemOp_xchg_eBX_eAX,
    /* 0x94 */ iemOp_xchg_eSP_eAX, iemOp_xchg_eBP_eAX, iemOp_xchg_eSI_eAX, iemOp_xchg_eDI_eAX,
    /* 0x98 */ iemOp_cbw, iemOp_cwd, iemOp_call_Ap, iemOp_wait,
    /* 0x9c */ iemOp_pushf_Fv, iemOp_popf_Fv, iemOp_sahf, iemOp_lahf,
    /* 0xa0 */ iemOp_mov_Al_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL, iemOp_mov_Ov_rAX,
    /* 0xa4 */ iemOp_movsb_Xb_Yb, iemOp_movswd_Xv_Yv, iemOp_cmpsb_Xb_Yb, iemOp_cmpswd_Xv_Yv,
    /* 0xa8 */ iemOp_test_AL_Ib, iemOp_test_eAX_Iz, iemOp_stosb_Yb_AL, iemOp_stoswd_Yv_eAX,
    /* 0xac */ iemOp_lodsb_AL_Xb, iemOp_lodswd_eAX_Xv, iemOp_scasb_AL_Xb, iemOp_scaswd_eAX_Xv,
    /* 0xb0 */ iemOp_mov_AL_Ib, iemOp_CL_Ib, iemOp_DL_Ib, iemOp_BL_Ib,
    /* 0xb4 */ iemOp_mov_AH_Ib, iemOp_CH_Ib, iemOp_DH_Ib, iemOp_BH_Ib,
    /* 0xb8 */ iemOp_eAX_Iv, iemOp_eCX_Iv, iemOp_eDX_Iv, iemOp_eBX_Iv,
    /* 0xbc */ iemOp_eSP_Iv, iemOp_eBP_Iv, iemOp_eSI_Iv, iemOp_eDI_Iv,
    /* 0xc0 */ iemOp_Grp2_Eb_Ib, iemOp_Grp2_Ev_Ib, iemOp_retn_Iw, iemOp_retn,
    /* 0xc4 */ iemOp_les_Gv_Mp_vex2, iemOp_lds_Gv_Mp_vex3, iemOp_Grp11_Eb_Ib, iemOp_Grp11_Ev_Iz,
    /* 0xc8 */ iemOp_enter_Iw_Ib, iemOp_leave, iemOp_retf_Iw, iemOp_retf,
    /* 0xcc */ iemOp_int_3, iemOp_int_Ib, iemOp_into, iemOp_iret,
    /* 0xd0 */ iemOp_Grp2_Eb_1, iemOp_Grp2_Ev_1, iemOp_Grp2_Eb_CL, iemOp_Grp2_Ev_CL,
    /* 0xd4 */ iemOp_aam_Ib, iemOp_aad_Ib, iemOp_salc, iemOp_xlat,
    /* 0xd8 */ iemOp_EscF0, iemOp_EscF1, iemOp_EscF2, iemOp_EscF3,
    /* 0xdc */ iemOp_EscF4, iemOp_EscF5, iemOp_EscF6, iemOp_EscF7,
    /* 0xe0 */ iemOp_loopne_Jb, iemOp_loope_Jb, iemOp_loop_Jb, iemOp_jecxz_Jb,
    /* 0xe4 */ iemOp_in_AL_Ib, iemOp_in_eAX_Ib, iemOp_out_Ib_AL, iemOp_out_Ib_eAX,
    /* 0xe8 */ iemOp_call_Jv, iemOp_jmp_Jv, iemOp_jmp_Ap, iemOp_jmp_Jb,
    /* 0xec */ iemOp_in_AL_DX, iemOp_eAX_DX, iemOp_out_DX_AL, iemOp_out_DX_eAX,
    /* 0xf0 */ iemOp_lock, iemOp_int_1, iemOp_repne, iemOp_repe,
    /* 0xf4 */ iemOp_hlt, iemOp_cmc, iemOp_Grp3_Eb, iemOp_Grp3_Ev,
    /* 0xf8 */ iemOp_clc, iemOp_stc, iemOp_cli, iemOp_sti,
    /* 0xfc */ iemOp_cld, iemOp_std, iemOp_Grp4, iemOp_Grp5,
};
17531
17532
17533/** @} */
17534
Note: See TracBrowser for help on using the repository browser.

© 2025 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette