VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructions.cpp.h@ 60991

Last change on this file since 60991 was 60991, checked in by vboxsync, 9 years ago

IEM: Implemented movntps/d and movaps/d. (lacks proper testcase)

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 598.9 KB
Line 
1/* $Id: IEMAllInstructions.cpp.h 60991 2016-05-16 19:26:51Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 */
5
6/*
7 * Copyright (C) 2011-2015 Oracle Corporation
8 *
9 * This file is part of VirtualBox Open Source Edition (OSE), as
10 * available from http://www.virtualbox.org. This file is free software;
11 * you can redistribute it and/or modify it under the terms of the GNU
12 * General Public License (GPL) as published by the Free Software
13 * Foundation, in version 2 as it comes in the "COPYING" file of the
14 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
15 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
16 */
17
18
19/*******************************************************************************
20* Global Variables *
21*******************************************************************************/
22extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */
23
24
25/**
26 * Common worker for instructions like ADD, AND, OR, ++ with a byte
27 * memory/register as the destination.
28 *
29 * @param pImpl Pointer to the instruction implementation (assembly).
30 */
31FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rm_r8, PCIEMOPBINSIZES, pImpl)
32{
33 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
34
35 /*
36 * If rm is denoting a register, no more instruction bytes.
37 */
38 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
39 {
40 IEMOP_HLP_NO_LOCK_PREFIX();
41
42 IEM_MC_BEGIN(3, 0);
43 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
44 IEM_MC_ARG(uint8_t, u8Src, 1);
45 IEM_MC_ARG(uint32_t *, pEFlags, 2);
46
47 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
48 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
49 IEM_MC_REF_EFLAGS(pEFlags);
50 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
51
52 IEM_MC_ADVANCE_RIP();
53 IEM_MC_END();
54 }
55 else
56 {
57 /*
58 * We're accessing memory.
59 * Note! We're putting the eflags on the stack here so we can commit them
60 * after the memory.
61 */
62 uint32_t const fAccess = pImpl->pfnLockedU8 ? IEM_ACCESS_DATA_RW : IEM_ACCESS_DATA_R; /* CMP,TEST */
63 IEM_MC_BEGIN(3, 2);
64 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
65 IEM_MC_ARG(uint8_t, u8Src, 1);
66 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
67 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
68
69 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
70 IEM_MC_MEM_MAP(pu8Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
71 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
72 IEM_MC_FETCH_EFLAGS(EFlags);
73 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
74 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
75 else
76 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);
77
78 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
79 IEM_MC_COMMIT_EFLAGS(EFlags);
80 IEM_MC_ADVANCE_RIP();
81 IEM_MC_END();
82 }
83 return VINF_SUCCESS;
84}
85
86
87/**
88 * Common worker for word/dword/qword instructions like ADD, AND, OR, ++ with
89 * memory/register as the destination.
90 *
91 * @param pImpl Pointer to the instruction implementation (assembly).
92 */
93FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rm_rv, PCIEMOPBINSIZES, pImpl)
94{
95 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
96
97 /*
98 * If rm is denoting a register, no more instruction bytes.
99 */
100 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
101 {
102 IEMOP_HLP_NO_LOCK_PREFIX();
103
104 switch (pIemCpu->enmEffOpSize)
105 {
106 case IEMMODE_16BIT:
107 IEM_MC_BEGIN(3, 0);
108 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
109 IEM_MC_ARG(uint16_t, u16Src, 1);
110 IEM_MC_ARG(uint32_t *, pEFlags, 2);
111
112 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
113 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
114 IEM_MC_REF_EFLAGS(pEFlags);
115 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
116
117 IEM_MC_ADVANCE_RIP();
118 IEM_MC_END();
119 break;
120
121 case IEMMODE_32BIT:
122 IEM_MC_BEGIN(3, 0);
123 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
124 IEM_MC_ARG(uint32_t, u32Src, 1);
125 IEM_MC_ARG(uint32_t *, pEFlags, 2);
126
127 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
128 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
129 IEM_MC_REF_EFLAGS(pEFlags);
130 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
131
132 if (pImpl != &g_iemAImpl_test)
133 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
134 IEM_MC_ADVANCE_RIP();
135 IEM_MC_END();
136 break;
137
138 case IEMMODE_64BIT:
139 IEM_MC_BEGIN(3, 0);
140 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
141 IEM_MC_ARG(uint64_t, u64Src, 1);
142 IEM_MC_ARG(uint32_t *, pEFlags, 2);
143
144 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
145 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
146 IEM_MC_REF_EFLAGS(pEFlags);
147 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
148
149 IEM_MC_ADVANCE_RIP();
150 IEM_MC_END();
151 break;
152 }
153 }
154 else
155 {
156 /*
157 * We're accessing memory.
158 * Note! We're putting the eflags on the stack here so we can commit them
159 * after the memory.
160 */
161 uint32_t const fAccess = pImpl->pfnLockedU8 ? IEM_ACCESS_DATA_RW : IEM_ACCESS_DATA_R /* CMP,TEST */;
162 switch (pIemCpu->enmEffOpSize)
163 {
164 case IEMMODE_16BIT:
165 IEM_MC_BEGIN(3, 2);
166 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
167 IEM_MC_ARG(uint16_t, u16Src, 1);
168 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
169 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
170
171 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
172 IEM_MC_MEM_MAP(pu16Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
173 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
174 IEM_MC_FETCH_EFLAGS(EFlags);
175 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
176 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
177 else
178 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
179
180 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
181 IEM_MC_COMMIT_EFLAGS(EFlags);
182 IEM_MC_ADVANCE_RIP();
183 IEM_MC_END();
184 break;
185
186 case IEMMODE_32BIT:
187 IEM_MC_BEGIN(3, 2);
188 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
189 IEM_MC_ARG(uint32_t, u32Src, 1);
190 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
191 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
192
193 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
194 IEM_MC_MEM_MAP(pu32Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
195 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
196 IEM_MC_FETCH_EFLAGS(EFlags);
197 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
198 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
199 else
200 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
201
202 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
203 IEM_MC_COMMIT_EFLAGS(EFlags);
204 IEM_MC_ADVANCE_RIP();
205 IEM_MC_END();
206 break;
207
208 case IEMMODE_64BIT:
209 IEM_MC_BEGIN(3, 2);
210 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
211 IEM_MC_ARG(uint64_t, u64Src, 1);
212 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
213 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
214
215 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
216 IEM_MC_MEM_MAP(pu64Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
217 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
218 IEM_MC_FETCH_EFLAGS(EFlags);
219 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
220 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
221 else
222 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
223
224 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
225 IEM_MC_COMMIT_EFLAGS(EFlags);
226 IEM_MC_ADVANCE_RIP();
227 IEM_MC_END();
228 break;
229 }
230 }
231 return VINF_SUCCESS;
232}
233
234
235/**
236 * Common worker for byte instructions like ADD, AND, OR, ++ with a register as
237 * the destination.
238 *
239 * @param pImpl Pointer to the instruction implementation (assembly).
240 */
241FNIEMOP_DEF_1(iemOpHlpBinaryOperator_r8_rm, PCIEMOPBINSIZES, pImpl)
242{
243 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
244 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
245
246 /*
247 * If rm is denoting a register, no more instruction bytes.
248 */
249 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
250 {
251 IEM_MC_BEGIN(3, 0);
252 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
253 IEM_MC_ARG(uint8_t, u8Src, 1);
254 IEM_MC_ARG(uint32_t *, pEFlags, 2);
255
256 IEM_MC_FETCH_GREG_U8(u8Src, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
257 IEM_MC_REF_GREG_U8(pu8Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
258 IEM_MC_REF_EFLAGS(pEFlags);
259 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
260
261 IEM_MC_ADVANCE_RIP();
262 IEM_MC_END();
263 }
264 else
265 {
266 /*
267 * We're accessing memory.
268 */
269 IEM_MC_BEGIN(3, 1);
270 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
271 IEM_MC_ARG(uint8_t, u8Src, 1);
272 IEM_MC_ARG(uint32_t *, pEFlags, 2);
273 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
274
275 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
276 IEM_MC_FETCH_MEM_U8(u8Src, pIemCpu->iEffSeg, GCPtrEffDst);
277 IEM_MC_REF_GREG_U8(pu8Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
278 IEM_MC_REF_EFLAGS(pEFlags);
279 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
280
281 IEM_MC_ADVANCE_RIP();
282 IEM_MC_END();
283 }
284 return VINF_SUCCESS;
285}
286
287
288/**
289 * Common worker for word/dword/qword instructions like ADD, AND, OR, ++ with a
290 * register as the destination.
291 *
292 * @param pImpl Pointer to the instruction implementation (assembly).
293 */
294FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rv_rm, PCIEMOPBINSIZES, pImpl)
295{
296 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
297 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
298
299 /*
300 * If rm is denoting a register, no more instruction bytes.
301 */
302 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
303 {
304 switch (pIemCpu->enmEffOpSize)
305 {
306 case IEMMODE_16BIT:
307 IEM_MC_BEGIN(3, 0);
308 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
309 IEM_MC_ARG(uint16_t, u16Src, 1);
310 IEM_MC_ARG(uint32_t *, pEFlags, 2);
311
312 IEM_MC_FETCH_GREG_U16(u16Src, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
313 IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
314 IEM_MC_REF_EFLAGS(pEFlags);
315 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
316
317 IEM_MC_ADVANCE_RIP();
318 IEM_MC_END();
319 break;
320
321 case IEMMODE_32BIT:
322 IEM_MC_BEGIN(3, 0);
323 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
324 IEM_MC_ARG(uint32_t, u32Src, 1);
325 IEM_MC_ARG(uint32_t *, pEFlags, 2);
326
327 IEM_MC_FETCH_GREG_U32(u32Src, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
328 IEM_MC_REF_GREG_U32(pu32Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
329 IEM_MC_REF_EFLAGS(pEFlags);
330 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
331
332 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
333 IEM_MC_ADVANCE_RIP();
334 IEM_MC_END();
335 break;
336
337 case IEMMODE_64BIT:
338 IEM_MC_BEGIN(3, 0);
339 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
340 IEM_MC_ARG(uint64_t, u64Src, 1);
341 IEM_MC_ARG(uint32_t *, pEFlags, 2);
342
343 IEM_MC_FETCH_GREG_U64(u64Src, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
344 IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
345 IEM_MC_REF_EFLAGS(pEFlags);
346 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
347
348 IEM_MC_ADVANCE_RIP();
349 IEM_MC_END();
350 break;
351 }
352 }
353 else
354 {
355 /*
356 * We're accessing memory.
357 */
358 switch (pIemCpu->enmEffOpSize)
359 {
360 case IEMMODE_16BIT:
361 IEM_MC_BEGIN(3, 1);
362 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
363 IEM_MC_ARG(uint16_t, u16Src, 1);
364 IEM_MC_ARG(uint32_t *, pEFlags, 2);
365 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
366
367 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
368 IEM_MC_FETCH_MEM_U16(u16Src, pIemCpu->iEffSeg, GCPtrEffDst);
369 IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
370 IEM_MC_REF_EFLAGS(pEFlags);
371 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
372
373 IEM_MC_ADVANCE_RIP();
374 IEM_MC_END();
375 break;
376
377 case IEMMODE_32BIT:
378 IEM_MC_BEGIN(3, 1);
379 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
380 IEM_MC_ARG(uint32_t, u32Src, 1);
381 IEM_MC_ARG(uint32_t *, pEFlags, 2);
382 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
383
384 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
385 IEM_MC_FETCH_MEM_U32(u32Src, pIemCpu->iEffSeg, GCPtrEffDst);
386 IEM_MC_REF_GREG_U32(pu32Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
387 IEM_MC_REF_EFLAGS(pEFlags);
388 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
389
390 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
391 IEM_MC_ADVANCE_RIP();
392 IEM_MC_END();
393 break;
394
395 case IEMMODE_64BIT:
396 IEM_MC_BEGIN(3, 1);
397 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
398 IEM_MC_ARG(uint64_t, u64Src, 1);
399 IEM_MC_ARG(uint32_t *, pEFlags, 2);
400 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
401
402 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
403 IEM_MC_FETCH_MEM_U64(u64Src, pIemCpu->iEffSeg, GCPtrEffDst);
404 IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
405 IEM_MC_REF_EFLAGS(pEFlags);
406 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
407
408 IEM_MC_ADVANCE_RIP();
409 IEM_MC_END();
410 break;
411 }
412 }
413 return VINF_SUCCESS;
414}
415
416
417/**
418 * Common worker for instructions like ADD, AND, OR, ++ with working on AL with
419 * a byte immediate.
420 *
421 * @param pImpl Pointer to the instruction implementation (assembly).
422 */
423FNIEMOP_DEF_1(iemOpHlpBinaryOperator_AL_Ib, PCIEMOPBINSIZES, pImpl)
424{
425 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
426 IEMOP_HLP_NO_LOCK_PREFIX();
427
428 IEM_MC_BEGIN(3, 0);
429 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
430 IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/ u8Imm, 1);
431 IEM_MC_ARG(uint32_t *, pEFlags, 2);
432
433 IEM_MC_REF_GREG_U8(pu8Dst, X86_GREG_xAX);
434 IEM_MC_REF_EFLAGS(pEFlags);
435 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
436
437 IEM_MC_ADVANCE_RIP();
438 IEM_MC_END();
439 return VINF_SUCCESS;
440}
441
442
443/**
444 * Common worker for instructions like ADD, AND, OR, ++ with working on
445 * AX/EAX/RAX with a word/dword immediate.
446 *
447 * @param pImpl Pointer to the instruction implementation (assembly).
448 */
449FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rAX_Iz, PCIEMOPBINSIZES, pImpl)
450{
451 switch (pIemCpu->enmEffOpSize)
452 {
453 case IEMMODE_16BIT:
454 {
455 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
456 IEMOP_HLP_NO_LOCK_PREFIX();
457
458 IEM_MC_BEGIN(3, 0);
459 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
460 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm, 1);
461 IEM_MC_ARG(uint32_t *, pEFlags, 2);
462
463 IEM_MC_REF_GREG_U16(pu16Dst, X86_GREG_xAX);
464 IEM_MC_REF_EFLAGS(pEFlags);
465 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
466
467 IEM_MC_ADVANCE_RIP();
468 IEM_MC_END();
469 return VINF_SUCCESS;
470 }
471
472 case IEMMODE_32BIT:
473 {
474 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
475 IEMOP_HLP_NO_LOCK_PREFIX();
476
477 IEM_MC_BEGIN(3, 0);
478 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
479 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm, 1);
480 IEM_MC_ARG(uint32_t *, pEFlags, 2);
481
482 IEM_MC_REF_GREG_U32(pu32Dst, X86_GREG_xAX);
483 IEM_MC_REF_EFLAGS(pEFlags);
484 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
485
486 if (pImpl != &g_iemAImpl_test)
487 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
488 IEM_MC_ADVANCE_RIP();
489 IEM_MC_END();
490 return VINF_SUCCESS;
491 }
492
493 case IEMMODE_64BIT:
494 {
495 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
496 IEMOP_HLP_NO_LOCK_PREFIX();
497
498 IEM_MC_BEGIN(3, 0);
499 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
500 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm, 1);
501 IEM_MC_ARG(uint32_t *, pEFlags, 2);
502
503 IEM_MC_REF_GREG_U64(pu64Dst, X86_GREG_xAX);
504 IEM_MC_REF_EFLAGS(pEFlags);
505 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
506
507 IEM_MC_ADVANCE_RIP();
508 IEM_MC_END();
509 return VINF_SUCCESS;
510 }
511
512 IEM_NOT_REACHED_DEFAULT_CASE_RET();
513 }
514}
515
516
/** Opcodes 0xf1, 0xd6.
 * Shared handler for opcode-table entries that must raise \#UD. */
FNIEMOP_DEF(iemOp_Invalid)
{
    IEMOP_MNEMONIC("Invalid");
    return IEMOP_RAISE_INVALID_OPCODE();
}
523
524
525
526/** @name ..... opcodes.
527 *
528 * @{
529 */
530
531/** @} */
532
533
534/** @name Two byte opcodes (first byte 0x0f).
535 *
536 * @{
537 */
538
/** Opcode 0x0f 0x00 /0.
 * SLDT - store the LDT selector to a register (operand-size wide) or to a
 * 16-bit memory word. */
FNIEMOP_DEF_1(iemOp_Grp6_sldt, uint8_t, bRm)
{
    IEMOP_MNEMONIC("sldt Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register destination: width follows the effective operand size. */
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Ldtr);
                IEM_MC_FETCH_LDTR_U16(u16Ldtr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Ldtr);
                IEM_MC_FETCH_LDTR_U32(u32Ldtr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Ldtr);
                IEM_MC_FETCH_LDTR_U64(u64Ldtr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory destination: always a 16-bit store regardless of operand size. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Ldtr);
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_LDTR_U16(u16Ldtr);
        IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Ldtr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
595
596
/** Opcode 0x0f 0x00 /1.
 * STR - store the task register selector to a register (operand-size wide) or
 * to a 16-bit memory word. */
FNIEMOP_DEF_1(iemOp_Grp6_str, uint8_t, bRm)
{
    IEMOP_MNEMONIC("str Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register destination: width follows the effective operand size. */
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Tr);
                IEM_MC_FETCH_TR_U16(u16Tr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Tr);
                IEM_MC_FETCH_TR_U32(u32Tr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tr);
                IEM_MC_FETCH_TR_U64(u64Tr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory destination: always a 16-bit store regardless of operand size. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Tr);
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_TR_U16(u16Tr);
        IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Tr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
653
654
/** Opcode 0x0f 0x00 /2.
 * LLDT - load the LDT register from a 16-bit register or memory word; the
 * heavy lifting (privilege and descriptor checks) is done in iemCImpl_lldt. */
FNIEMOP_DEF_1(iemOp_Grp6_lldt, uint8_t, bRm)
{
    IEMOP_MNEMONIC("lldt Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
685
686
/** Opcode 0x0f 0x00 /3.
 * LTR - load the task register from a 16-bit register or memory word; the
 * heavy lifting (privilege and descriptor checks) is done in iemCImpl_ltr. */
FNIEMOP_DEF_1(iemOp_Grp6_ltr, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ltr Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
717
718
719/** Opcode 0x0f 0x00 /3. */
720FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
721{
722 IEMOP_HLP_MIN_286();
723 IEMOP_HLP_NO_REAL_OR_V86_MODE();
724
725 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
726 {
727 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
728 IEM_MC_BEGIN(2, 0);
729 IEM_MC_ARG(uint16_t, u16Sel, 0);
730 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
731 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
732 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
733 IEM_MC_END();
734 }
735 else
736 {
737 IEM_MC_BEGIN(2, 1);
738 IEM_MC_ARG(uint16_t, u16Sel, 0);
739 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
740 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
741 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
742 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
743 IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc);
744 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
745 IEM_MC_END();
746 }
747 return VINF_SUCCESS;
748}
749
750
/** Opcode 0x0f 0x00 /4.
 * VERR - verify a segment selector for reading; delegates to the common
 * VERR/VERW worker with fWrite=false. */
FNIEMOP_DEF_1(iemOp_Grp6_verr, uint8_t, bRm)
{
    IEMOP_MNEMONIC("verr Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
}
758
759
760/** Opcode 0x0f 0x00 /5. */
761FNIEMOP_DEF_1(iemOp_Grp6_verw, uint8_t, bRm)
762{
763 IEMOP_MNEMONIC("verr Ew");
764 IEMOP_HLP_MIN_286();
765 return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
766}
767
768
769/** Opcode 0x0f 0x00. */
770FNIEMOP_DEF(iemOp_Grp6)
771{
772 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
773 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
774 {
775 case 0: return FNIEMOP_CALL_1(iemOp_Grp6_sldt, bRm);
776 case 1: return FNIEMOP_CALL_1(iemOp_Grp6_str, bRm);
777 case 2: return FNIEMOP_CALL_1(iemOp_Grp6_lldt, bRm);
778 case 3: return FNIEMOP_CALL_1(iemOp_Grp6_ltr, bRm);
779 case 4: return FNIEMOP_CALL_1(iemOp_Grp6_verr, bRm);
780 case 5: return FNIEMOP_CALL_1(iemOp_Grp6_verw, bRm);
781 case 6: return IEMOP_RAISE_INVALID_OPCODE();
782 case 7: return IEMOP_RAISE_INVALID_OPCODE();
783 IEM_NOT_REACHED_DEFAULT_CASE_RET();
784 }
785
786}
787
788
/** Opcode 0x0f 0x01 /0.
 * SGDT - store the GDTR to memory; the store itself is done in iemCImpl_sgdt. */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC("sgdt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE(); /* Forces 64-bit operand size in long mode. */
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
805
806
/** Opcode 0x0f 0x01 /0 (ModRM byte 0xc1 - VMCALL, Intel VMX).
 * Not implemented: logs the stub and raises \#UD. */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
813
814
/** Opcode 0x0f 0x01 /0 (ModRM byte 0xc2 - VMLAUNCH, Intel VMX).
 * Not implemented: logs the stub and raises \#UD. */
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
821
822
/** Opcode 0x0f 0x01 /0 (ModRM byte 0xc3 - VMRESUME, Intel VMX).
 * Not implemented: logs the stub and raises \#UD. */
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
829
830
/** Opcode 0x0f 0x01 /0 (ModRM byte 0xc4 - VMXOFF, Intel VMX).
 * Not implemented: logs the stub and raises \#UD. */
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
837
838
/** Opcode 0x0f 0x01 /1.
 * SIDT - store the IDTR to memory; the store itself is done in iemCImpl_sidt. */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC("sidt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE(); /* Forces 64-bit operand size in long mode. */
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
855
856
/** Opcode 0x0f 0x01 /1 (ModRM byte 0xc8 - MONITOR).
 * Entirely deferred to iemCImpl_monitor; only the effective segment is passed. */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    IEMOP_MNEMONIC("monitor");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pIemCpu->iEffSeg);
}
864
865
/** Opcode 0x0f 0x01 /1 (ModRM byte 0xc9 - MWAIT).
 * Entirely deferred to iemCImpl_mwait. */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    IEMOP_MNEMONIC("mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
}
873
874
/** Opcode 0x0f 0x01 /2.
 * LGDT - load the GDTR from memory; privilege and limit handling is done in
 * iemCImpl_lgdt, which also needs the effective operand size.
 *
 * NOTE(review): unlike sgdt/sidt there is no IEMOP_HLP_MIN_286() here -
 * confirm whether that is intentional. */
FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC("lgdt");
    IEMOP_HLP_64BIT_OP_SIZE(); /* Forces 64-bit operand size in long mode. */
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pIemCpu->enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}
891
892
/** Opcode 0x0f 0x01 0xd0.
 * XGETBV - only decodes when the guest CPU profile reports XSAVE/XRSTOR
 * support, otherwise raises \#UD. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    IEMOP_MNEMONIC("xgetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xgetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}
904
905
/** Opcode 0x0f 0x01 0xd1.
 * XSETBV - only decodes when the guest CPU profile reports XSAVE/XRSTOR
 * support, otherwise raises \#UD. */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    IEMOP_MNEMONIC("xsetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xsetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}
917
918
/** Opcode 0x0f 0x01 /3.
 * LIDT - load the IDTR from memory; in long mode the operand size is forced
 * to 64-bit, otherwise the effective operand size is used.
 *
 * NOTE(review): unlike the sibling handlers there is no IEMOP_MNEMONIC()
 * statement here - confirm whether that is an oversight. */
FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
{
    IEMMODE enmEffOpSize = pIemCpu->enmCpuMode == IEMMODE_64BIT
                         ? IEMMODE_64BIT
                         : pIemCpu->enmEffOpSize;
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}
936
937
/*
 * AMD SVM instructions (0x0f 0x01 with ModRM bytes 0xd8-0xdf): all currently
 * stubbed out to raise #UD via FNIEMOP_UD_STUB.
 */

/** Opcode 0x0f 0x01 0xd8. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmmcall);

/** Opcode 0x0f 0x01 0xda. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);

/** Opcode 0x0f 0x01 0xdb. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);

/** Opcode 0x0f 0x01 0xdc. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);

/** Opcode 0x0f 0x01 0xdd. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);

/** Opcode 0x0f 0x01 0xde. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);

/** Opcode 0x0f 0x01 0xdf. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
961
962/** Opcode 0x0f 0x01 /4. */
963FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
964{
965 IEMOP_MNEMONIC("smsw");
966 IEMOP_HLP_MIN_286();
967 IEMOP_HLP_NO_LOCK_PREFIX();
968 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
969 {
970 switch (pIemCpu->enmEffOpSize)
971 {
972 case IEMMODE_16BIT:
973 IEM_MC_BEGIN(0, 1);
974 IEM_MC_LOCAL(uint16_t, u16Tmp);
975 IEM_MC_FETCH_CR0_U16(u16Tmp);
976 if (IEM_GET_TARGET_CPU(pIemCpu) > IEMTARGETCPU_386)
977 { /* likely */ }
978 else if (IEM_GET_TARGET_CPU(pIemCpu) >= IEMTARGETCPU_386)
979 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
980 else
981 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
982 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Tmp);
983 IEM_MC_ADVANCE_RIP();
984 IEM_MC_END();
985 return VINF_SUCCESS;
986
987 case IEMMODE_32BIT:
988 IEM_MC_BEGIN(0, 1);
989 IEM_MC_LOCAL(uint32_t, u32Tmp);
990 IEM_MC_FETCH_CR0_U32(u32Tmp);
991 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Tmp);
992 IEM_MC_ADVANCE_RIP();
993 IEM_MC_END();
994 return VINF_SUCCESS;
995
996 case IEMMODE_64BIT:
997 IEM_MC_BEGIN(0, 1);
998 IEM_MC_LOCAL(uint64_t, u64Tmp);
999 IEM_MC_FETCH_CR0_U64(u64Tmp);
1000 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Tmp);
1001 IEM_MC_ADVANCE_RIP();
1002 IEM_MC_END();
1003 return VINF_SUCCESS;
1004
1005 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1006 }
1007 }
1008 else
1009 {
1010 /* Ignore operand size here, memory refs are always 16-bit. */
1011 IEM_MC_BEGIN(0, 2);
1012 IEM_MC_LOCAL(uint16_t, u16Tmp);
1013 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
1014 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
1015 IEM_MC_FETCH_CR0_U16(u16Tmp);
1016 if (IEM_GET_TARGET_CPU(pIemCpu) > IEMTARGETCPU_386)
1017 { /* likely */ }
1018 else if (pIemCpu->uTargetCpu >= IEMTARGETCPU_386)
1019 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
1020 else
1021 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
1022 IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Tmp);
1023 IEM_MC_ADVANCE_RIP();
1024 IEM_MC_END();
1025 return VINF_SUCCESS;
1026 }
1027}
1028
1029
/** Opcode 0x0f 0x01 /6. */
FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
{
    /* The operand size is effectively ignored, all is 16-bit and only the
       lower 3-bits are used. */
    /* LMSW - load the machine status word; the actual CR0 update and the
       privilege/#GP checks are done by iemCImpl_lmsw. */
    IEMOP_MNEMONIC("lmsw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_LOCK_PREFIX();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register source. */
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    else
    {
        /* Memory source: always a 16-bit read. */
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1058
1059
/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
{
    /* INVLPG - invalidate the TLB entry for the effective address; the work
       is deferred to iemCImpl_invlpg (which also handles privilege checks). */
    IEMOP_MNEMONIC("invlpg");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(1, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}
1073
1074
/** Opcode 0x0f 0x01 /7 (register form, rm=0). */
FNIEMOP_DEF(iemOp_Grp7_swapgs)
{
    /* SWAPGS - 64-bit mode only; everything is handled by iemCImpl_swapgs. */
    IEMOP_MNEMONIC("swapgs");
    IEMOP_HLP_ONLY_64BIT();
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
}
1083
1084
/** Opcode 0x0f 0x01 /7 (register form, rm=1). */
FNIEMOP_DEF(iemOp_Grp7_rdtscp)
{
    /* RDTSCP - not implemented yet; complain loudly and fail decoding. */
    NOREF(pIemCpu);
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}
1092
1093
/** Opcode 0x0f 0x01. */
FNIEMOP_DEF(iemOp_Grp7)
{
    /*
     * Group 7 dispatcher.  The reg field selects the instruction; for several
     * encodings a register-form (mod == 3) rm value selects a different
     * instruction (VMX/SVM/monitor/xsave-control/swapgs variants) from the
     * memory form (descriptor-table and paging instructions).
     */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: /* mem: SGDT; reg: VMX instructions. */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_sgdt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 1: /* mem: SIDT; reg: MONITOR/MWAIT. */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_sidt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 2: /* mem: LGDT; reg: XGETBV/XSETBV. */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_lgdt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 3: /* mem: LIDT; reg: AMD SVM instructions (all rm values used). */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_lidt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
                case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
                case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
                case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }

        case 4: /* SMSW - both forms handled by the worker. */
            return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);

        case 5: /* Unassigned. */
            return IEMOP_RAISE_INVALID_OPCODE();

        case 6: /* LMSW - both forms handled by the worker. */
            return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);

        case 7: /* mem: INVLPG; reg: SWAPGS/RDTSCP. */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_invlpg, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
1170
/** Common worker for LAR (0x0f 0x02) and LSL (0x0f 0x03).
 * (The old comment claimed 0x0f 0x00 /3, which is a copy/paste leftover.)
 *
 * Fetches a selector from register or memory (always a 16-bit read) and
 * defers the descriptor lookup, access-rights/limit extraction and ZF update
 * to iemCImpl_LarLsl_u16/u64.  @a fIsLar selects LAR vs LSL behaviour in the
 * C implementation. */
FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
{
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint16_t *,  pu16Dst,           0);
                IEM_MC_ARG(uint16_t,    u16Sel,            1);
                IEM_MC_ARG(uint32_t *,  pEFlags,           2);
                IEM_MC_ARG_CONST(bool,  fIsLarArg, fIsLar, 3);

                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_CIMPL_4(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, pEFlags, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            /* 32-bit and 64-bit share the 64-bit worker; presumably
               iemCImpl_LarLsl_u64 honours the effective operand size -
               NOTE(review): confirm in the C implementation. */
            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint64_t *,  pu64Dst,           0);
                IEM_MC_ARG(uint16_t,    u16Sel,            1);
                IEM_MC_ARG(uint32_t *,  pEFlags,           2);
                IEM_MC_ARG_CONST(bool,  fIsLarArg, fIsLar, 3);

                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_CIMPL_4(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, pEFlags, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint16_t *,  pu16Dst,           0);
                IEM_MC_ARG(uint16_t,    u16Sel,            1);
                IEM_MC_ARG(uint32_t *,  pEFlags,           2);
                IEM_MC_ARG_CONST(bool,  fIsLarArg, fIsLar, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);

                IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_CIMPL_4(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, pEFlags, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint64_t *,  pu64Dst,           0);
                IEM_MC_ARG(uint16_t,    u16Sel,            1);
                IEM_MC_ARG(uint32_t *,  pEFlags,           2);
                IEM_MC_ARG_CONST(bool,  fIsLarArg, fIsLar, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
/** @todo testcase: make sure it's a 16-bit read. */

                IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_CIMPL_4(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, pEFlags, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
1272
1273
/** Opcode 0x0f 0x02 - LAR Gv,Ew (load access rights). */
FNIEMOP_DEF(iemOp_lar_Gv_Ew)
{
    IEMOP_MNEMONIC("lar Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true /*fIsLar*/);
}
1281
1282
/** Opcode 0x0f 0x03 - LSL Gv,Ew (load segment limit). */
FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
{
    IEMOP_MNEMONIC("lsl Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false /*fIsLar*/);
}
1289
1290
/** Opcode 0x0f 0x05 - SYSCALL; fully handled by the C implementation. */
FNIEMOP_DEF(iemOp_syscall)
{
    IEMOP_MNEMONIC("syscall"); /** @todo 286 LOADALL */
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
}
1298
1299
/** Opcode 0x0f 0x06 - CLTS (clear CR0.TS); handled by the C implementation. */
FNIEMOP_DEF(iemOp_clts)
{
    IEMOP_MNEMONIC("clts");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
}
1307
1308
/** Opcode 0x0f 0x07 - SYSRET; fully handled by the C implementation. */
FNIEMOP_DEF(iemOp_sysret)
{
    IEMOP_MNEMONIC("sysret"); /** @todo 386 LOADALL */
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
}
1316
1317
/** Opcode 0x0f 0x08 - INVD; not implemented yet. */
FNIEMOP_STUB(iemOp_invd);
// IEMOP_HLP_MIN_486();
1321
1322
/** Opcode 0x0f 0x09 - WBINVD.
 * Cache write-back/invalidation itself is not emulated; only the CPL-0
 * privilege check is performed and the instruction otherwise acts as a NOP. */
FNIEMOP_DEF(iemOp_wbinvd)
{
    IEMOP_MNEMONIC("wbinvd");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS; /* ignore for now */
}
1335
1336
/** Opcode 0x0f 0x0b - UD2: architecturally defined to raise \#UD. */
FNIEMOP_DEF(iemOp_ud2)
{
    IEMOP_MNEMONIC("ud2");
    return IEMOP_RAISE_INVALID_OPCODE();
}
1343
/** Opcode 0x0f 0x0d - Group P (AMD 3DNow! prefetch).
 * Without the 3DNowPrefetch feature, or for the register form, this is an
 * invalid opcode; otherwise the prefetch hints are treated as NOPs (only the
 * effective address is computed, no access is performed). */
FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
{
    /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
    if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->f3DNowPrefetch)
    {
        IEMOP_MNEMONIC("GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register form is invalid. */
        IEMOP_MNEMONIC("GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    IEMOP_HLP_NO_LOCK_PREFIX();
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 2: /* Aliased to /0 for the time being. */
        case 4: /* Aliased to /0 for the time being. */
        case 5: /* Aliased to /0 for the time being. */
        case 6: /* Aliased to /0 for the time being. */
        case 7: /* Aliased to /0 for the time being. */
        case 0: IEMOP_MNEMONIC("prefetch"); break;
        case 1: IEMOP_MNEMONIC("prefetchw"); break;
        case 3: IEMOP_MNEMONIC("prefetchw"); break; /* NOTE(review): same mnemonic as /1 - verify /3 against AMD docs. */
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffSrc);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    /* Currently a NOP. */
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
1383
1384
/** Opcode 0x0f 0x0e - FEMMS (3DNow!); not implemented. */
FNIEMOP_STUB(iemOp_femms);


/*
 * AMD 3DNow! instructions, encoded as 0x0f 0x0f with a trailing imm8 suffix
 * byte.  All are unimplemented stubs; iemOp_3Dnow below dispatches on the
 * suffix byte.
 */

/** Opcode 0x0f 0x0f 0x0c. */
FNIEMOP_STUB(iemOp_3Dnow_pi2fw_Pq_Qq);

/** Opcode 0x0f 0x0f 0x0d. */
FNIEMOP_STUB(iemOp_3Dnow_pi2fd_Pq_Qq);

/** Opcode 0x0f 0x0f 0x1c. */
FNIEMOP_STUB(iemOp_3Dnow_pf2fw_Pq_Qq);

/** Opcode 0x0f 0x0f 0x1d. */
FNIEMOP_STUB(iemOp_3Dnow_pf2fd_Pq_Qq);

/** Opcode 0x0f 0x0f 0x8a. */
FNIEMOP_STUB(iemOp_3Dnow_pfnacc_Pq_Qq);

/** Opcode 0x0f 0x0f 0x8e. */
FNIEMOP_STUB(iemOp_3Dnow_pfpnacc_Pq_Qq);

/** Opcode 0x0f 0x0f 0x90. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpge_Pq_Qq);

/** Opcode 0x0f 0x0f 0x94. */
FNIEMOP_STUB(iemOp_3Dnow_pfmin_Pq_Qq);

/** Opcode 0x0f 0x0f 0x96. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcp_Pq_Qq);

/** Opcode 0x0f 0x0f 0x97. */
FNIEMOP_STUB(iemOp_3Dnow_pfrsqrt_Pq_Qq);

/** Opcode 0x0f 0x0f 0x9a. */
FNIEMOP_STUB(iemOp_3Dnow_pfsub_Pq_Qq);

/** Opcode 0x0f 0x0f 0x9e. */
FNIEMOP_STUB(iemOp_3Dnow_pfadd_PQ_Qq);

/** Opcode 0x0f 0x0f 0xa0. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpgt_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa4. */
FNIEMOP_STUB(iemOp_3Dnow_pfmax_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa6. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcpit1_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa7. */
FNIEMOP_STUB(iemOp_3Dnow_pfrsqit1_Pq_Qq);

/** Opcode 0x0f 0x0f 0xaa. */
FNIEMOP_STUB(iemOp_3Dnow_pfsubr_Pq_Qq);

/** Opcode 0x0f 0x0f 0xae. */
FNIEMOP_STUB(iemOp_3Dnow_pfacc_PQ_Qq);

/** Opcode 0x0f 0x0f 0xb0. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpeq_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb4. */
FNIEMOP_STUB(iemOp_3Dnow_pfmul_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb6. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcpit2_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb7. */
FNIEMOP_STUB(iemOp_3Dnow_pmulhrw_Pq_Qq);

/** Opcode 0x0f 0x0f 0xbb. */
FNIEMOP_STUB(iemOp_3Dnow_pswapd_Pq_Qq);

/** Opcode 0x0f 0x0f 0xbf. */
FNIEMOP_STUB(iemOp_3Dnow_pavgusb_PQ_Qq);
1460
1461
/** Opcode 0x0f 0x0f - 3DNow! dispatcher.
 * Raises \#UD when the guest CPU lacks 3DNow!; otherwise dispatches on the
 * trailing imm8 suffix byte. */
FNIEMOP_DEF(iemOp_3Dnow)
{
    if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->f3DNow)
    {
        IEMOP_MNEMONIC("3Dnow");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    /* This is pretty sparse, use switch instead of table. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    switch (b)
    {
        case 0x0c: return FNIEMOP_CALL(iemOp_3Dnow_pi2fw_Pq_Qq);
        case 0x0d: return FNIEMOP_CALL(iemOp_3Dnow_pi2fd_Pq_Qq);
        case 0x1c: return FNIEMOP_CALL(iemOp_3Dnow_pf2fw_Pq_Qq);
        case 0x1d: return FNIEMOP_CALL(iemOp_3Dnow_pf2fd_Pq_Qq);
        case 0x8a: return FNIEMOP_CALL(iemOp_3Dnow_pfnacc_Pq_Qq);
        case 0x8e: return FNIEMOP_CALL(iemOp_3Dnow_pfpnacc_Pq_Qq);
        case 0x90: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpge_Pq_Qq);
        case 0x94: return FNIEMOP_CALL(iemOp_3Dnow_pfmin_Pq_Qq);
        case 0x96: return FNIEMOP_CALL(iemOp_3Dnow_pfrcp_Pq_Qq);
        case 0x97: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqrt_Pq_Qq);
        case 0x9a: return FNIEMOP_CALL(iemOp_3Dnow_pfsub_Pq_Qq);
        case 0x9e: return FNIEMOP_CALL(iemOp_3Dnow_pfadd_PQ_Qq);
        case 0xa0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpgt_Pq_Qq);
        case 0xa4: return FNIEMOP_CALL(iemOp_3Dnow_pfmax_Pq_Qq);
        case 0xa6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit1_Pq_Qq);
        case 0xa7: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqit1_Pq_Qq);
        case 0xaa: return FNIEMOP_CALL(iemOp_3Dnow_pfsubr_Pq_Qq);
        case 0xae: return FNIEMOP_CALL(iemOp_3Dnow_pfacc_PQ_Qq);
        case 0xb0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpeq_Pq_Qq);
        case 0xb4: return FNIEMOP_CALL(iemOp_3Dnow_pfmul_Pq_Qq);
        case 0xb6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit2_Pq_Qq);
        case 0xb7: return FNIEMOP_CALL(iemOp_3Dnow_pmulhrw_Pq_Qq);
        case 0xbb: return FNIEMOP_CALL(iemOp_3Dnow_pswapd_Pq_Qq);
        case 0xbf: return FNIEMOP_CALL(iemOp_3Dnow_pavgusb_PQ_Qq);
        default:
            return IEMOP_RAISE_INVALID_OPCODE(); /* Unassigned suffix bytes. */
    }
}
1503
1504
/*
 * SSE/SSE2 move and unpack instructions 0x0f 0x10..0x17, all unimplemented.
 * Each slot covers the prefix-dependent forms (none/0x66/0xf3/0xf2).
 */

/** Opcode 0x0f 0x10. */
FNIEMOP_STUB(iemOp_movups_Vps_Wps__movupd_Vpd_Wpd__movss_Vss_Wss__movsd_Vsd_Wsd);
/** Opcode 0x0f 0x11. */
FNIEMOP_STUB(iemOp_movups_Wps_Vps__movupd_Wpd_Vpd__movss_Wss_Vss__movsd_Vsd_Wsd);
/** Opcode 0x0f 0x12. */
FNIEMOP_STUB(iemOp_movlps_Vq_Mq__movhlps_Vq_Uq__movlpd_Vq_Mq__movsldup_Vq_Wq__movddup_Vq_Wq); //NEXT
/** Opcode 0x0f 0x13. */
FNIEMOP_STUB(iemOp_movlps_Mq_Vq__movlpd_Mq_Vq); //NEXT
/** Opcode 0x0f 0x14.
 * NOTE(review): the name says "unpckhlps" but 0x0f 0x14 is UNPCKLPS/UNPCKLPD -
 * looks like a typo; verify against the opcode dispatch table. */
FNIEMOP_STUB(iemOp_unpckhlps_Vps_Wq__unpcklpd_Vpd_Wq);
/** Opcode 0x0f 0x15. */
FNIEMOP_STUB(iemOp_unpckhps_Vps_Wq__unpckhpd_Vpd_Wq);
/** Opcode 0x0f 0x16. */
FNIEMOP_STUB(iemOp_movhps_Vq_Mq__movlhps_Vq_Uq__movhpd_Vq_Mq__movshdup_Vq_Wq); //NEXT
/** Opcode 0x0f 0x17. */
FNIEMOP_STUB(iemOp_movhps_Mq_Vq__movhpd_Mq_Vq); //NEXT
1521
1522
/** Opcode 0x0f 0x18 - Group 16 (PREFETCHh).
 * Memory forms are prefetch hints, emulated as NOPs (only the effective
 * address is computed); the register form is an invalid opcode. */
FNIEMOP_DEF(iemOp_prefetch_Grp16)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX();
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 4: /* Aliased to /0 for the time being according to AMD. */
            case 5: /* Aliased to /0 for the time being according to AMD. */
            case 6: /* Aliased to /0 for the time being according to AMD. */
            case 7: /* Aliased to /0 for the time being according to AMD. */
            case 0: IEMOP_MNEMONIC("prefetchNTA m8"); break;
            case 1: IEMOP_MNEMONIC("prefetchT0  m8"); break;
            case 2: IEMOP_MNEMONIC("prefetchT1  m8"); break;
            case 3: IEMOP_MNEMONIC("prefetchT2  m8"); break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }

        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        /* Currently a NOP. */
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    return IEMOP_RAISE_INVALID_OPCODE();
}
1554
1555
/** Opcode 0x0f 0x19..0x1f - multi-byte NOP Ev.
 * Decodes the ModR/M byte (computing any effective address for side effects
 * of decoding) and does nothing else. */
FNIEMOP_DEF(iemOp_nop_Ev)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEM_MC_BEGIN(0, 0);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        /* Currently a NOP. */
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1578
1579
/** Opcode 0x0f 0x20 - MOV Rd,Cd (read control register). */
FNIEMOP_DEF(iemOp_mov_Rd_Cd)
{
    /* mod is ignored, as is operand size overrides. */
    IEMOP_MNEMONIC("mov Rd,Cd");
    IEMOP_HLP_MIN_386();
    /* Operand size is forced: 64-bit in long mode, 32-bit otherwise. */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_64BIT;
    else
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg;
    if (pIemCpu->fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fMovCr8In32Bit)
            return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    /* Only CR0, CR2, CR3, CR4 and CR8 are valid sources. */
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_HLP_DONE_DECODING();

    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, (X86_MODRM_RM_MASK & bRm) | pIemCpu->uRexB, iCrReg);
}
1611
1612
1613/** Opcode 0x0f 0x21. */
1614FNIEMOP_DEF(iemOp_mov_Rd_Dd)
1615{
1616 IEMOP_MNEMONIC("mov Rd,Dd");
1617 IEMOP_HLP_MIN_386();
1618 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1619 IEMOP_HLP_NO_LOCK_PREFIX();
1620 if (pIemCpu->fPrefixes & IEM_OP_PRF_REX_R)
1621 return IEMOP_RAISE_INVALID_OPCODE();
1622 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
1623 (X86_MODRM_RM_MASK & bRm) | pIemCpu->uRexB,
1624 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK));
1625}
1626
1627
/** Opcode 0x0f 0x22 - MOV Cd,Rd (write control register). */
FNIEMOP_DEF(iemOp_mov_Cd_Rd)
{
    /* mod is ignored, as is operand size overrides. */
    IEMOP_MNEMONIC("mov Cd,Rd");
    IEMOP_HLP_MIN_386();
    /* Operand size is forced: 64-bit in long mode, 32-bit otherwise. */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_64BIT;
    else
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg;
    if (pIemCpu->fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fMovCr8In32Bit)
            return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    /* Only CR0, CR2, CR3, CR4 and CR8 are valid destinations. */
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_HLP_DONE_DECODING();

    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, (X86_MODRM_RM_MASK & bRm) | pIemCpu->uRexB);
}
1659
1660
/** Opcode 0x0f 0x23 - MOV Dd,Rd (write debug register).
 * Privilege and \#GP checks are done by iemCImpl_mov_Dd_Rd. */
FNIEMOP_DEF(iemOp_mov_Dd_Rd)
{
    IEMOP_MNEMONIC("mov Dd,Rd");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REX_R)
        return IEMOP_RAISE_INVALID_OPCODE(); /* REX.R would select DR8+ - rejected here. */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
                                   ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK),
                                   (X86_MODRM_RM_MASK & bRm) | pIemCpu->uRexB);
}
1674
1675
/** Opcode 0x0f 0x24 - MOV Rd,Td (test registers, 386/486 only).
 * Decoded as an invalid opcode on the emulated CPUs. */
FNIEMOP_DEF(iemOp_mov_Rd_Td)
{
    IEMOP_MNEMONIC("mov Rd,Td");
    /** @todo works on 386 and 486. */
    /* The RM byte is not considered, see testcase. */
    return IEMOP_RAISE_INVALID_OPCODE();
}
1684
1685
/** Opcode 0x0f 0x26 - MOV Td,Rd (test registers, 386/486 only).
 * Decoded as an invalid opcode on the emulated CPUs. */
FNIEMOP_DEF(iemOp_mov_Td_Rd)
{
    IEMOP_MNEMONIC("mov Td,Rd");
    /** @todo works on 386 and 486. */
    /* The RM byte is not considered, see testcase. */
    return IEMOP_RAISE_INVALID_OPCODE();
}
1694
1695
/** Opcode 0x0f 0x28 - MOVAPS/MOVAPD Vps,Wps (load, register destination).
 * The 0x66 prefix selects MOVAPD (SSE2 check); otherwise MOVAPS (SSE check).
 * Memory source must be 16-byte aligned (enforced by the aligned fetch). */
FNIEMOP_DEF(iemOp_movaps_Vps_Wps__movapd_Vpd_Wpd)
{
    IEMOP_MNEMONIC(!(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP) ? "movaps r,mr" : "movapd r,mr");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        IEM_MC_BEGIN(0, 0);
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP))
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        else
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg,
                              (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR,   GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ */
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP))
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        else
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1741
1742
/** Opcode 0x0f 0x29 - MOVAPS/MOVAPD Wps,Vps (store, register source).
 * The 0x66 prefix selects MOVAPD (SSE2 check); otherwise MOVAPS (SSE check).
 * Memory destination must be 16-byte aligned (enforced by the aligned store). */
FNIEMOP_DEF(iemOp_movaps_Wps_Vps__movapd_Wpd_Vpd)
{
    IEMOP_MNEMONIC(!(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP) ? "movaps mr,r" : "movapd mr,r");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        IEM_MC_BEGIN(0, 0);
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP))
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        else
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR,   GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ */
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP))
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        else
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pIemCpu->iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1788
1789
/** Opcode 0x0f 0x2a - CVTPI2PS and prefixed variants; not implemented. */
FNIEMOP_STUB(iemOp_cvtpi2ps_Vps_Qpi__cvtpi2pd_Vpd_Qpi__cvtsi2ss_Vss_Ey__cvtsi2sd_Vsd_Ey); //NEXT
1792
1793
1794/** Opcode 0x0f 0x2b. */
1795FNIEMOP_DEF(iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd)
1796{
1797 IEMOP_MNEMONIC(!(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP) ? "movntps mr,r" : "movntpd mr,r");
1798 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1799 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1800 {
1801 /*
1802 * Register, memory.
1803 */
1804 IEM_MC_BEGIN(0, 2);
1805 IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
1806 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1807
1808 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1809 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ */
1810 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP))
1811 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1812 else
1813 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1814
1815 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);
1816 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uSrc);
1817
1818 IEM_MC_ADVANCE_RIP();
1819 IEM_MC_END();
1820 }
1821 /* The register, register encoding is invalid. */
1822 else
1823 return IEMOP_RAISE_INVALID_OPCODE();
1824 return VINF_SUCCESS;
1825}
1826
1827
/** Opcode 0x0f 0x2c - CVTTPS2PI and prefixed variants; not implemented. */
FNIEMOP_STUB(iemOp_cvttps2pi_Ppi_Wps__cvttpd2pi_Ppi_Wpd__cvttss2si_Gy_Wss__cvttsd2si_Yu_Wsd); //NEXT
/** Opcode 0x0f 0x2d - CVTPS2PI and prefixed variants; not implemented. */
FNIEMOP_STUB(iemOp_cvtps2pi_Ppi_Wps__cvtpd2pi_QpiWpd__cvtss2si_Gy_Wss__cvtsd2si_Gy_Wsd);
/** Opcode 0x0f 0x2e - UCOMISS/UCOMISD; not implemented. */
FNIEMOP_STUB(iemOp_ucomiss_Vss_Wss__ucomisd_Vsd_Wsd); //NEXT
/** Opcode 0x0f 0x2f - COMISS/COMISD; not implemented. */
FNIEMOP_STUB(iemOp_comiss_Vss_Wss__comisd_Vsd_Wsd);
1836
1837
/** Opcode 0x0f 0x30 - WRMSR; fully handled by the C implementation. */
FNIEMOP_DEF(iemOp_wrmsr)
{
    IEMOP_MNEMONIC("wrmsr");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
}
1845
1846
/** Opcode 0x0f 0x31 - RDTSC; fully handled by the C implementation. */
FNIEMOP_DEF(iemOp_rdtsc)
{
    IEMOP_MNEMONIC("rdtsc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
}
1854
1855
/** Opcode 0x0f 0x32 - RDMSR; fully handled by the C implementation.
 * (The old comment said 0x33, but RDMSR is 0x0f 0x32; 0x33 is RDPMC.) */
FNIEMOP_DEF(iemOp_rdmsr)
{
    IEMOP_MNEMONIC("rdmsr");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
}
1863
1864
/** Opcode 0x0f 0x33 - RDPMC; not implemented.
 * (The old comment said 0x34; RDPMC is 0x0f 0x33.) */
FNIEMOP_STUB(iemOp_rdpmc);
/** Opcode 0x0f 0x34 - SYSENTER; not implemented. */
FNIEMOP_STUB(iemOp_sysenter);
/** Opcode 0x0f 0x35 - SYSEXIT; not implemented. */
FNIEMOP_STUB(iemOp_sysexit);
/** Opcode 0x0f 0x37 - GETSEC; not implemented. */
FNIEMOP_STUB(iemOp_getsec);
/** Opcode 0x0f 0x38. */
FNIEMOP_UD_STUB(iemOp_3byte_Esc_A4); /* Here there be dragons... */
/** Opcode 0x0f 0x3a. */
FNIEMOP_UD_STUB(iemOp_3byte_Esc_A5); /* Here there be dragons... */
/** Opcode 0x0f 0x3c (?).
 * NOTE(review): MOVNTI Gv,Ev is 0x0f 0xc3 in the Intel opcode map; the "(?)"
 * suggests this slot assignment needs verifying against the dispatch table. */
FNIEMOP_STUB(iemOp_movnti_Gv_Ev);
1879
/**
 * Implements a conditional move (CMOVcc Gv,Ev).
 *
 * Register and memory source forms are handled for 16/32/64-bit operand
 * sizes.  Note the x86 quirk: in 64-bit mode a 32-bit CMOVcc clears the high
 * dword of the destination even when the condition is FALSE (the ELSE
 * branches below), and the memory operand is read regardless of the
 * condition.
 *
 * Wish there was an obvious way to do this where we could share and reduce
 * code bloat.
 *
 * @param a_Cnd The conditional "microcode" operation.
 */
#define CMOV_X(a_Cnd) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) \
    { \
        switch (pIemCpu->enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(0, 1); \
                IEM_MC_LOCAL(uint16_t, u16Tmp); \
                a_Cnd { \
                    IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB); \
                    IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(0, 1); \
                IEM_MC_LOCAL(uint32_t, u32Tmp); \
                a_Cnd { \
                    IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB); \
                    IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp); \
                } IEM_MC_ELSE() { \
                    IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(0, 1); \
                IEM_MC_LOCAL(uint64_t, u64Tmp); \
                a_Cnd { \
                    IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB); \
                    IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        switch (pIemCpu->enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(0, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
                IEM_MC_LOCAL(uint16_t, u16Tmp); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
                IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrEffSrc); \
                a_Cnd { \
                    IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(0, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
                IEM_MC_LOCAL(uint32_t, u32Tmp); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
                IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffSrc); \
                a_Cnd { \
                    IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp); \
                } IEM_MC_ELSE() { \
                    IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(0, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
                IEM_MC_LOCAL(uint64_t, u64Tmp); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
                IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffSrc); \
                a_Cnd { \
                    IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } do {} while (0)
1980
1981
1982
1983/** Opcode 0x0f 0x40. */
1984FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
1985{
1986 IEMOP_MNEMONIC("cmovo Gv,Ev");
1987 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
1988}
1989
1990
1991/** Opcode 0x0f 0x41. */
1992FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
1993{
1994 IEMOP_MNEMONIC("cmovno Gv,Ev");
1995 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
1996}
1997
1998
1999/** Opcode 0x0f 0x42. */
2000FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
2001{
2002 IEMOP_MNEMONIC("cmovc Gv,Ev");
2003 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
2004}
2005
2006
2007/** Opcode 0x0f 0x43. */
2008FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
2009{
2010 IEMOP_MNEMONIC("cmovnc Gv,Ev");
2011 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
2012}
2013
2014
/** Opcode 0x0f 0x44 - cmove/cmovz Gv,Ev: move if equal/zero (ZF=1). */
FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
{
    IEMOP_MNEMONIC("cmove Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
}
2021
2022
/** Opcode 0x0f 0x45 - cmovne/cmovnz Gv,Ev: move if not equal/zero (ZF=0). */
FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovne Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
}
2029
2030
/** Opcode 0x0f 0x46 - cmovbe/cmovna Gv,Ev: move if below or equal (CF=1 or ZF=1). */
FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovbe Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
}
2037
2038
/** Opcode 0x0f 0x47 - cmovnbe/cmova Gv,Ev: move if above (CF=0 and ZF=0). */
FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovnbe Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
}
2045
2046
/** Opcode 0x0f 0x48 - cmovs Gv,Ev: move if sign (SF=1). */
FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovs Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
}
2053
2054
/** Opcode 0x0f 0x49 - cmovns Gv,Ev: move if no sign (SF=0). */
FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovns Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
}
2061
2062
/** Opcode 0x0f 0x4a - cmovp/cmovpe Gv,Ev: move if parity even (PF=1). */
FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovp Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
}
2069
2070
/** Opcode 0x0f 0x4b - cmovnp/cmovpo Gv,Ev: move if parity odd (PF=0). */
FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovnp Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
}
2077
2078
/** Opcode 0x0f 0x4c - cmovl/cmovnge Gv,Ev: move if less (SF != OF). */
FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovl Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
}
2085
2086
/** Opcode 0x0f 0x4d - cmovnl/cmovge Gv,Ev: move if greater or equal (SF == OF). */
FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovnl Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
}
2093
2094
/** Opcode 0x0f 0x4e - cmovle/cmovng Gv,Ev: move if less or equal (ZF=1 or SF != OF). */
FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovle Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
}
2101
2102
/** Opcode 0x0f 0x4f - cmovnle/cmovg Gv,Ev: move if greater (ZF=0 and SF == OF). */
FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovnle Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
}
2109
2110#undef CMOV_X
2111
/*
 * Opcodes 0x0f 0x50 thru 0x5f - SSE/SSE2 floating-point operations.
 * All are still unimplemented stubs; "//NEXT" appears to mark the ones
 * slated for implementation next (TODO confirm convention).
 */
/** Opcode 0x0f 0x50. */
FNIEMOP_STUB(iemOp_movmskps_Gy_Ups__movmskpd_Gy_Upd);
/** Opcode 0x0f 0x51. */
FNIEMOP_STUB(iemOp_sqrtps_Wps_Vps__sqrtpd_Wpd_Vpd__sqrtss_Vss_Wss__sqrtsd_Vsd_Wsd);
/** Opcode 0x0f 0x52. */
FNIEMOP_STUB(iemOp_rsqrtps_Wps_Vps__rsqrtss_Vss_Wss);
/** Opcode 0x0f 0x53. */
FNIEMOP_STUB(iemOp_rcpps_Wps_Vps__rcpss_Vs_Wss);
/** Opcode 0x0f 0x54. */
FNIEMOP_STUB(iemOp_andps_Vps_Wps__andpd_Wpd_Vpd);
/** Opcode 0x0f 0x55. */
FNIEMOP_STUB(iemOp_andnps_Vps_Wps__andnpd_Wpd_Vpd);
/** Opcode 0x0f 0x56. */
FNIEMOP_STUB(iemOp_orps_Wpd_Vpd__orpd_Wpd_Vpd);
/** Opcode 0x0f 0x57. */
FNIEMOP_STUB(iemOp_xorps_Vps_Wps__xorpd_Wpd_Vpd);
/** Opcode 0x0f 0x58. */
FNIEMOP_STUB(iemOp_addps_Vps_Wps__addpd_Vpd_Wpd__addss_Vss_Wss__addsd_Vsd_Wsd); //NEXT
/** Opcode 0x0f 0x59. */
FNIEMOP_STUB(iemOp_mulps_Vps_Wps__mulpd_Vpd_Wpd__mulss_Vss__Wss__mulsd_Vsd_Wsd);//NEXT
/** Opcode 0x0f 0x5a. */
FNIEMOP_STUB(iemOp_cvtps2pd_Vpd_Wps__cvtpd2ps_Vps_Wpd__cvtss2sd_Vsd_Wss__cvtsd2ss_Vss_Wsd);
/** Opcode 0x0f 0x5b. */
FNIEMOP_STUB(iemOp_cvtdq2ps_Vps_Wdq__cvtps2dq_Vdq_Wps__cvtps2dq_Vdq_Wps);
/** Opcode 0x0f 0x5c. */
FNIEMOP_STUB(iemOp_subps_Vps_Wps__subpd_Vps_Wdp__subss_Vss_Wss__subsd_Vsd_Wsd);
/** Opcode 0x0f 0x5d. */
FNIEMOP_STUB(iemOp_minps_Vps_Wps__minpd_Vpd_Wpd__minss_Vss_Wss__minsd_Vsd_Wsd);
/** Opcode 0x0f 0x5e. */
FNIEMOP_STUB(iemOp_divps_Vps_Wps__divpd_Vpd_Wpd__divss_Vss_Wss__divsd_Vsd_Wsd);
/** Opcode 0x0f 0x5f. */
FNIEMOP_STUB(iemOp_maxps_Vps_Wps__maxpd_Vpd_Wpd__maxss_Vss_Wss__maxsd_Vsd_Wsd);
2144
2145
/**
 * Common worker for SSE2 and MMX instructions on the forms:
 *      pxxxx xmm1, xmm2/mem128
 *      pxxxx mm1, mm2/mem32
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 32-bit memory access for MMX and a 128-bit aligned 64-bit memory
 * access for SSE.
 *
 * Exceptions type 4.
 *
 * @param   pImpl   Table with the 128-bit (SSE) and optional 64-bit (MMX)
 *                  assembly worker; a NULL pfnU64 makes the MMX form \#UD.
 */
FNIEMOP_DEF_1(iemOpCommonMmxSse_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* The operand size prefix (0x66) selects the SSE form, no prefix the MMX one. */
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE */
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(2, 0);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_ARG(uint64_t const *, pSrc, 1); /* only the low qword of the source is used */
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_XREG_U64_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_LOCAL(uint64_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                /* 64-bit fetch with 128-bit alignment requirement (exception type 4). */
                IEM_MC_FETCH_MEM_U64_ALIGN_U128(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);

                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            if (!pImpl->pfnU64)
                return IEMOP_RAISE_INVALID_OPCODE();
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
                /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(2, 0);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_ARG(uint32_t const *, pSrc, 1); /* only the low dword of the source is used */
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_REF_MREG_U32_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
                IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_LOCAL(uint32_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint32_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_FETCH_MEM_U32(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);

                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default: /* REPZ/REPNZ prefixed forms are not defined for these opcodes. */
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
2252
2253
/** Opcode 0x0f 0x60 - punpcklbw: interleave low-order bytes. */
FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd__punpcklbw_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpcklbw");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_LowLow_To_Full, &g_iemAImpl_punpcklbw);
}
2260
2261
/** Opcode 0x0f 0x61 - punpcklwd: interleave low-order words. */
FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd__punpcklwd_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpcklwd"); /** @todo AMD mark the MMX version as 3DNow!. Intel says MMX CPUID req. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_LowLow_To_Full, &g_iemAImpl_punpcklwd);
}
2268
2269
/** Opcode 0x0f 0x62 - punpckldq: interleave low-order dwords. */
FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd__punpckldq_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpckldq");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_LowLow_To_Full, &g_iemAImpl_punpckldq);
}
2276
2277
/* Opcodes 0x0f 0x63 thru 0x67 - pack/compare instructions, unimplemented stubs. */
/** Opcode 0x0f 0x63. */
FNIEMOP_STUB(iemOp_packsswb_Pq_Qq__packsswb_Vdq_Wdq);
/** Opcode 0x0f 0x64. */
FNIEMOP_STUB(iemOp_pcmpgtb_Pq_Qq__pcmpgtb_Vdq_Wdq);
/** Opcode 0x0f 0x65. */
FNIEMOP_STUB(iemOp_pcmpgtw_Pq_Qq__pcmpgtw_Vdq_Wdq);
/** Opcode 0x0f 0x66. */
FNIEMOP_STUB(iemOp_pcmpgtd_Pq_Qq__pcmpgtd_Vdq_Wdq);
/** Opcode 0x0f 0x67. */
FNIEMOP_STUB(iemOp_packuswb_Pq_Qq__packuswb_Vdq_Wdq);
2288
2289
/**
 * Common worker for SSE2 and MMX instructions on the forms:
 *      pxxxx xmm1, xmm2/mem128
 *      pxxxx mm1, mm2/mem64
 *
 * The 2nd operand is the second half of a register, which in the memory case
 * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
 * where it may read the full 128 bits or only the upper 64 bits.
 *
 * Exceptions type 4.
 *
 * @param   pImpl   Table with the 128-bit (SSE) and optional 64-bit (MMX)
 *                  assembly worker; a NULL pfnU64 makes the MMX form \#UD.
 */
FNIEMOP_DEF_1(iemOpCommonMmxSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* The operand size prefix (0x66) selects the SSE form, no prefix the MMX one. */
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE */
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(2, 0);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_ARG(uint128_t const *, pSrc, 1);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_LOCAL(uint128_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc); /* Most CPUs probably only read the high qword. */

                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            if (!pImpl->pfnU64)
                return IEMOP_RAISE_INVALID_OPCODE();
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
                /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(2, 0);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_ARG(uint64_t const *, pSrc, 1);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
                IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_LOCAL(uint64_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_FETCH_MEM_U64(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);

                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default: /* REPZ/REPNZ prefixed forms are not defined for these opcodes. */
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
2396
2397
/** Opcode 0x0f 0x68 - punpckhbw: interleave high-order bytes. */
FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qq__punpckhbw_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpckhbw");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
}
2404
2405
/** Opcode 0x0f 0x69 - punpckhwd: interleave high-order words. */
FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qd__punpckhwd_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpckhwd");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
}
2412
2413
/** Opcode 0x0f 0x6a - punpckhdq: interleave high-order dwords. */
FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qd__punpckhdq_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpckhdq");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
}
2420
/** Opcode 0x0f 0x6b - packssdw, unimplemented stub. */
FNIEMOP_STUB(iemOp_packssdw_Pq_Qd__packssdq_Vdq_Wdq);
2423
2424
/** Opcode 0x0f 0x6c - punpcklqdq: interleave low-order qwords (SSE2 only; no MMX form). */
FNIEMOP_DEF(iemOp_punpcklqdq_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpcklqdq");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_LowLow_To_Full, &g_iemAImpl_punpcklqdq);
}
2431
2432
/** Opcode 0x0f 0x6d - punpckhqdq: interleave high-order qwords (SSE2 only; no MMX form). */
FNIEMOP_DEF(iemOp_punpckhqdq_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpckhqdq");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhqdq);
}
2439
2440
/**
 * Opcode 0x0f 0x6e - movd/movq Pd/q,Ed/q (MMX) and movd/movq Vy,Ey (SSE).
 *
 * Loads a GPR or memory dword/qword into an MMX or XMM register; REX.W
 * selects the 64-bit (movq) form.  The SSE store zero-extends to 128 bits,
 * the 32-bit MMX forms zero-extend to 64 bits.
 */
FNIEMOP_DEF(iemOp_movd_q_Pd_Ey__movd_q_Vy_Ey)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE */
            IEMOP_MNEMONIC("movd/q Wd/q,Ed/q");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* XMM, greg*/
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                    IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                    IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* XMM, [mem] */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1); /* one immediate-ish byte follows: none here, but keep decode order */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                    IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                    IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            IEMOP_MNEMONIC("movq/d Pd/q,Ed/q");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* MMX, greg */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                    IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                else
                    IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* MMX, [mem] */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                    IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                    IEM_MC_STORE_MREG_U32_ZX_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default: /* REPZ/REPNZ prefixed forms are not defined for this opcode. */
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
2541
2542
/**
 * Opcode 0x0f 0x6f - movq Pq,Qq (MMX), movdqa Vdq,Wdq (66 prefix) and
 * movdqu Vdq,Wdq (F3 prefix).
 *
 * The 0x66 and 0xf3 forms differ only in whether the 128-bit memory
 * access must be aligned.
 */
FNIEMOP_DEF(iemOp_movq_Pq_Qq__movdqa_Vdq_Wdq__movdqu_Vdq_Wdq)
{
    bool fAligned = false;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE aligned */
            fAligned = true;
            /* fall thru - shares the code path with the unaligned form. */
        case IEM_OP_PRF_REPZ: /* SSE unaligned */
            if (fAligned)
                IEMOP_MNEMONIC("movdqa Vdq,Wdq");
            else
                IEMOP_MNEMONIC("movdqu Vdq,Wdq");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint128_t, u128Tmp);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_FETCH_XREG_U128(u128Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u128Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint128_t, u128Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                if (fAligned)
                    IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                else
                    IEM_MC_FETCH_MEM_U128(u128Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u128Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            IEMOP_MNEMONIC("movq Pq,Qq");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
                /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_FETCH_MREG_U64(u64Tmp, bRm & X86_MODRM_RM_MASK);
                IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default: /* The REPNZ (F2) prefixed form is not defined. */
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
2636
2637
/**
 * Opcode 0x0f 0x70 - pshufw (MMX), pshufd (66), pshuflw (F2), pshufhw (F3).
 *
 * The immediate here is evil: it follows the ModR/M displacement, so in the
 * memory forms it must be fetched after IEM_MC_CALC_RM_EFF_ADDR - the order
 * of the decode macros below is load-bearing.
 */
FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib__pshufd_Vdq_Wdq_Ib__pshufhw_Vdq_Wdq_Ib__pshuflq_Vdq_Wdq_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE */
        case IEM_OP_PRF_REPNZ: /* SSE */
        case IEM_OP_PRF_REPZ: /* SSE */
        {
            /* All three SSE variants share the decode; only the worker differs. */
            PFNIEMAIMPLMEDIAPSHUF pfnAImpl;
            switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
            {
                case IEM_OP_PRF_SIZE_OP:
                    IEMOP_MNEMONIC("pshufd Vdq,Wdq,Ib");
                    pfnAImpl = iemAImpl_pshufd;
                    break;
                case IEM_OP_PRF_REPNZ:
                    IEMOP_MNEMONIC("pshuflw Vdq,Wdq,Ib");
                    pfnAImpl = iemAImpl_pshuflw;
                    break;
                case IEM_OP_PRF_REPZ:
                    IEMOP_MNEMONIC("pshufhw Vdq,Wdq,Ib");
                    pfnAImpl = iemAImpl_pshufhw;
                    break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_ARG(uint128_t const *, pSrc, 1);
                IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_CALL_SSE_AIMPL_3(pfnAImpl, pDst, pSrc, bEvilArg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_LOCAL(uint128_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil); /* must come after the effective address */
                IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();

                IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_SSE_AIMPL_3(pfnAImpl, pDst, pSrc, bEvilArg);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }

        case 0: /* MMX Extension */
            IEMOP_MNEMONIC("pshufw Pq,Qq,Ib");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_ARG(uint64_t const *, pSrc, 1);
                IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
                IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_LOCAL(uint64_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil); /* must come after the effective address */
                IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();

                IEM_MC_FETCH_MEM_U64(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
2762
2763
/* Group 12 (0x0f 0x71) workers - packed word shifts by immediate, unimplemented stubs. */
/** Opcode 0x0f 0x71 11/2. */
FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x71 11/2. */
FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Udq_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x71 11/4. */
FNIEMOP_STUB_1(iemOp_Grp12_psraw_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x71 11/4. */
FNIEMOP_STUB_1(iemOp_Grp12_psraw_Udq_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x71 11/6. */
FNIEMOP_STUB_1(iemOp_Grp12_psllw_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x71 11/6. */
FNIEMOP_STUB_1(iemOp_Grp12_psllw_Udq_Ib, uint8_t, bRm);
2781
2782
2783/** Opcode 0x0f 0x71. */
2784FNIEMOP_DEF(iemOp_Grp12)
2785{
2786 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2787 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2788 return IEMOP_RAISE_INVALID_OPCODE();
2789 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
2790 {
2791 case 0: case 1: case 3: case 5: case 7:
2792 return IEMOP_RAISE_INVALID_OPCODE();
2793 case 2:
2794 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2795 {
2796 case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psrlw_Nq_Ib, bRm);
2797 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psrlw_Udq_Ib, bRm);
2798 default: return IEMOP_RAISE_INVALID_OPCODE();
2799 }
2800 case 4:
2801 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2802 {
2803 case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psraw_Nq_Ib, bRm);
2804 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psraw_Udq_Ib, bRm);
2805 default: return IEMOP_RAISE_INVALID_OPCODE();
2806 }
2807 case 6:
2808 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2809 {
2810 case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psllw_Nq_Ib, bRm);
2811 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psllw_Udq_Ib, bRm);
2812 default: return IEMOP_RAISE_INVALID_OPCODE();
2813 }
2814 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2815 }
2816}
2817
2818
/* Group 13 (0x0f 0x72) workers - packed dword shifts by immediate, unimplemented stubs. */
/** Opcode 0x0f 0x72 11/2. */
FNIEMOP_STUB_1(iemOp_Grp13_psrld_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x72 11/2. */
FNIEMOP_STUB_1(iemOp_Grp13_psrld_Udq_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x72 11/4. */
FNIEMOP_STUB_1(iemOp_Grp13_psrad_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x72 11/4. */
FNIEMOP_STUB_1(iemOp_Grp13_psrad_Udq_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x72 11/6. */
FNIEMOP_STUB_1(iemOp_Grp13_pslld_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x72 11/6. */
FNIEMOP_STUB_1(iemOp_Grp13_pslld_Udq_Ib, uint8_t, bRm);
2836
2837
2838/** Opcode 0x0f 0x72. */
2839FNIEMOP_DEF(iemOp_Grp13)
2840{
2841 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2842 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2843 return IEMOP_RAISE_INVALID_OPCODE();
2844 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
2845 {
2846 case 0: case 1: case 3: case 5: case 7:
2847 return IEMOP_RAISE_INVALID_OPCODE();
2848 case 2:
2849 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2850 {
2851 case 0: return FNIEMOP_CALL_1(iemOp_Grp13_psrld_Nq_Ib, bRm);
2852 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_psrld_Udq_Ib, bRm);
2853 default: return IEMOP_RAISE_INVALID_OPCODE();
2854 }
2855 case 4:
2856 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2857 {
2858 case 0: return FNIEMOP_CALL_1(iemOp_Grp13_psrad_Nq_Ib, bRm);
2859 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_psrad_Udq_Ib, bRm);
2860 default: return IEMOP_RAISE_INVALID_OPCODE();
2861 }
2862 case 6:
2863 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2864 {
2865 case 0: return FNIEMOP_CALL_1(iemOp_Grp13_pslld_Nq_Ib, bRm);
2866 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_pslld_Udq_Ib, bRm);
2867 default: return IEMOP_RAISE_INVALID_OPCODE();
2868 }
2869 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2870 }
2871}
2872
2873
/* Group 14 (0x0f 0x73) workers - packed qword/oword shifts by immediate, unimplemented stubs. */
/** Opcode 0x0f 0x73 11/2. */
FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x73 11/2. */
FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Udq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x73 11/3. */
FNIEMOP_STUB_1(iemOp_Grp14_psrldq_Udq_Ib, uint8_t, bRm); //NEXT

/** Opcode 0x0f 0x73 11/6. */
FNIEMOP_STUB_1(iemOp_Grp14_psllq_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x73 11/6. */
FNIEMOP_STUB_1(iemOp_Grp14_psllq_Udq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x73 11/7. */
FNIEMOP_STUB_1(iemOp_Grp14_pslldq_Udq_Ib, uint8_t, bRm); //NEXT
2891
2892
2893/** Opcode 0x0f 0x73. */
2894FNIEMOP_DEF(iemOp_Grp14)
2895{
2896 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2897 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2898 return IEMOP_RAISE_INVALID_OPCODE();
2899 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
2900 {
2901 case 0: case 1: case 4: case 5:
2902 return IEMOP_RAISE_INVALID_OPCODE();
2903 case 2:
2904 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2905 {
2906 case 0: return FNIEMOP_CALL_1(iemOp_Grp14_psrlq_Nq_Ib, bRm);
2907 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psrlq_Udq_Ib, bRm);
2908 default: return IEMOP_RAISE_INVALID_OPCODE();
2909 }
2910 case 3:
2911 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2912 {
2913 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psrldq_Udq_Ib, bRm);
2914 default: return IEMOP_RAISE_INVALID_OPCODE();
2915 }
2916 case 6:
2917 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2918 {
2919 case 0: return FNIEMOP_CALL_1(iemOp_Grp14_psllq_Nq_Ib, bRm);
2920 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psllq_Udq_Ib, bRm);
2921 default: return IEMOP_RAISE_INVALID_OPCODE();
2922 }
2923 case 7:
2924 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2925 {
2926 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_pslldq_Udq_Ib, bRm);
2927 default: return IEMOP_RAISE_INVALID_OPCODE();
2928 }
2929 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2930 }
2931}
2932
2933
/**
 * Common worker for SSE2 and MMX instructions on the forms:
 *      pxxx    mm1, mm2/mem64
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SSE2 and MMX cpuid checks.
 *
 * @param   pImpl   Table with the 128-bit (SSE2) and 64-bit (MMX) assembly
 *                  workers; unlike the Low/High workers, pfnU64 is not
 *                  checked for NULL here.
 */
FNIEMOP_DEF_1(iemOpCommonMmxSse2_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* The operand size prefix (0x66) selects the SSE2 form, no prefix the MMX one. */
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE */
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(2, 0);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_ARG(uint128_t const *, pSrc, 1);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_LOCAL(uint128_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);

                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
                /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(2, 0);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_ARG(uint64_t const *, pSrc, 1);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
                IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_LOCAL(uint64_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_FETCH_MEM_U64(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);

                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default: /* REPZ/REPNZ prefixed forms are not defined for these opcodes. */
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
3035
3036
/** Opcode 0x0f 0x74 - pcmpeqb: packed compare-for-equal, bytes (MMX/SSE2). */
FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq__pcmpeqb_Vdq_Wdq)
{
    IEMOP_MNEMONIC("pcmpeqb");
    /* All decoding and prefix handling is done by the common MMX/SSE2 worker. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
}
3043
3044
/** Opcode 0x0f 0x75 - pcmpeqw: packed compare-for-equal, words (MMX/SSE2). */
FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq__pcmpeqw_Vdq_Wdq)
{
    IEMOP_MNEMONIC("pcmpeqw");
    /* All decoding and prefix handling is done by the common MMX/SSE2 worker. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
}
3051
3052
/** Opcode 0x0f 0x76 - pcmpeqd: packed compare-for-equal, dwords (MMX/SSE2).
 * NOTE(review): the function name has a typo ("pcmped"); left as-is since the
 * opcode dispatch table elsewhere in the file references this exact symbol. */
FNIEMOP_DEF(iemOp_pcmped_Pq_Qq__pcmpeqd_Vdq_Wdq)
{
    IEMOP_MNEMONIC("pcmpeqd");
    /* All decoding and prefix handling is done by the common MMX/SSE2 worker. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
}
3059
3060
/** Opcode 0x0f 0x77 - emms (not yet implemented; stub asserts when hit). */
FNIEMOP_STUB(iemOp_emms);
/** Opcode 0x0f 0x78 - vmread; raises \#UD (AMD group 17 territory). */
FNIEMOP_UD_STUB(iemOp_vmread_AmdGrp17);
/** Opcode 0x0f 0x79 - vmwrite; raises \#UD. */
FNIEMOP_UD_STUB(iemOp_vmwrite);
/** Opcode 0x0f 0x7c - haddpd/haddps (not yet implemented). */
FNIEMOP_STUB(iemOp_haddpd_Vdp_Wpd__haddps_Vps_Wps);
/** Opcode 0x0f 0x7d - hsubpd/hsubps (not yet implemented). */
FNIEMOP_STUB(iemOp_hsubpd_Vpd_Wpd__hsubps_Vps_Wps);
3071
3072
/** Opcode 0x0f 0x7e - movd/movq Ey,Pd/Vy.
 *
 * 0x66 prefix selects the XMM source form, no prefix the MMX source form.
 * REX.W selects the 64-bit (movq) variant, otherwise the low 32 bits are
 * moved (movd).
 *
 * NOTE(review): the F3-prefixed form (movq Vq,Wq) implied by the function
 * name falls into the default case and raises \#UD here — confirm whether
 * that form is implemented elsewhere or still pending. */
FNIEMOP_DEF(iemOp_movd_q_Ey_Pd__movd_q_Ey_Vy__movq_Vq_Wq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE */
            IEMOP_MNEMONIC("movd/q Ed/q,Wd/q");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* greg, XMM */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    /* movq: 64-bit general register destination. */
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                    IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Tmp);
                }
                else
                {
                    /* movd: low 32 bits of the XMM register. */
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                    IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* [mem], XMM */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                    IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffSrc, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                    IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrEffSrc, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            IEMOP_MNEMONIC("movq/d Ed/q,Pd/q");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* greg, MMX */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                    IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                    IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* [mem], MMX */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                    IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffSrc, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                    IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrEffSrc, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
3179
3180
/** Opcode 0x0f 0x7f - movq Qq,Pq (MMX) / movdqa Wdq,Vdq (0x66) /
 * movdqu Wdq,Vdq (F3).  Stores the full MMX/XMM register to register or
 * memory; only the movdqa form enforces 16-byte alignment. */
FNIEMOP_DEF(iemOp_movq_Qq_Pq__movq_movdqa_Wdq_Vdq__movdqu_Wdq_Vdq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    bool fAligned = false;
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE aligned */
            fAligned = true;
            /* fall thru - aligned and unaligned forms share the code below. */
        case IEM_OP_PRF_REPZ: /* SSE unaligned */
            if (fAligned)
                IEMOP_MNEMONIC("movdqa Wdq,Vdq");
            else
                IEMOP_MNEMONIC("movdqu Wdq,Vdq");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint128_t, u128Tmp);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_XREG_U128((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u128Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.  (Memory is the destination here; the
                 * local is named GCPtrEffSrc by file-wide convention.)
                 */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint128_t, u128Tmp);
                IEM_MC_LOCAL(RTGCPTR,   GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                if (fAligned)
                    IEM_MC_STORE_MEM_U128_ALIGN_SSE(pIemCpu->iEffSeg, GCPtrEffSrc, u128Tmp);
                else
                    IEM_MC_STORE_MEM_U128(pIemCpu->iEffSeg, GCPtrEffSrc, u128Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            IEMOP_MNEMONIC("movq Qq,Pq");

            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
                /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_STORE_MREG_U64(bRm & X86_MODRM_RM_MASK, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffSrc, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default: /* F2 (REPNZ) encoding is undefined for 0x0f 0x7f. */
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
3275
3276
3277
/** Opcode 0x0f 0x80 - jo Jv: jump near, 16/32-bit displacement, if OF=1. */
FNIEMOP_DEF(iemOp_jo_Jv)
{
    IEMOP_MNEMONIC("jo Jv");
    IEMOP_HLP_MIN_386();                /* Two-byte Jcc requires a 386 or later. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* Operand size defaults to 64-bit in long mode. */
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm); /* taken */
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();       /* not taken */
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm); /* taken */
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();       /* not taken */
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3312
3313
/** Opcode 0x0f 0x81 - jno Jv: jump near, 16/32-bit displacement, if OF=0. */
FNIEMOP_DEF(iemOp_jno_Jv)
{
    IEMOP_MNEMONIC("jno Jv");
    IEMOP_HLP_MIN_386();                /* Two-byte Jcc requires a 386 or later. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* Operand size defaults to 64-bit in long mode. */
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();       /* not taken (condition inverted) */
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm); /* taken */
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();       /* not taken (condition inverted) */
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm); /* taken */
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3348
3349
/** Opcode 0x0f 0x82 - jc/jb/jnae Jv: jump near if CF=1. */
FNIEMOP_DEF(iemOp_jc_Jv)
{
    IEMOP_MNEMONIC("jc/jb/jnae Jv");
    IEMOP_HLP_MIN_386();                /* Two-byte Jcc requires a 386 or later. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* Operand size defaults to 64-bit in long mode. */
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_REL_JMP_S16(i16Imm); /* taken */
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();       /* not taken */
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_REL_JMP_S32(i32Imm); /* taken */
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();       /* not taken */
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3384
3385
/** Opcode 0x0f 0x83 - jnc/jnb/jae Jv: jump near if CF=0. */
FNIEMOP_DEF(iemOp_jnc_Jv)
{
    IEMOP_MNEMONIC("jnc/jnb/jae Jv");
    IEMOP_HLP_MIN_386();                /* Two-byte Jcc requires a 386 or later. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* Operand size defaults to 64-bit in long mode. */
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_ADVANCE_RIP();       /* not taken (condition inverted) */
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm); /* taken */
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_ADVANCE_RIP();       /* not taken (condition inverted) */
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm); /* taken */
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3420
3421
/** Opcode 0x0f 0x84 - je/jz Jv: jump near if ZF=1. */
FNIEMOP_DEF(iemOp_je_Jv)
{
    IEMOP_MNEMONIC("je/jz Jv");
    IEMOP_HLP_MIN_386();                /* Two-byte Jcc requires a 386 or later. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* Operand size defaults to 64-bit in long mode. */
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_REL_JMP_S16(i16Imm); /* taken */
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();       /* not taken */
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_REL_JMP_S32(i32Imm); /* taken */
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();       /* not taken */
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3456
3457
/** Opcode 0x0f 0x85 - jne/jnz Jv: jump near if ZF=0. */
FNIEMOP_DEF(iemOp_jne_Jv)
{
    IEMOP_MNEMONIC("jne/jnz Jv");
    IEMOP_HLP_MIN_386();                /* Two-byte Jcc requires a 386 or later. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* Operand size defaults to 64-bit in long mode. */
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();       /* not taken (condition inverted) */
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm); /* taken */
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();       /* not taken (condition inverted) */
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm); /* taken */
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3492
3493
/** Opcode 0x0f 0x86 - jbe/jna Jv: jump near if CF=1 or ZF=1. */
FNIEMOP_DEF(iemOp_jbe_Jv)
{
    IEMOP_MNEMONIC("jbe/jna Jv");
    IEMOP_HLP_MIN_386();                /* Two-byte Jcc requires a 386 or later. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* Operand size defaults to 64-bit in long mode. */
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_REL_JMP_S16(i16Imm); /* taken */
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();       /* not taken */
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_REL_JMP_S32(i32Imm); /* taken */
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();       /* not taken */
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3528
3529
/** Opcode 0x0f 0x87 - jnbe/ja Jv: jump near if CF=0 and ZF=0. */
FNIEMOP_DEF(iemOp_jnbe_Jv)
{
    IEMOP_MNEMONIC("jnbe/ja Jv");
    IEMOP_HLP_MIN_386();                /* Two-byte Jcc requires a 386 or later. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* Operand size defaults to 64-bit in long mode. */
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();       /* not taken (condition inverted) */
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm); /* taken */
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();       /* not taken (condition inverted) */
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm); /* taken */
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3564
3565
/** Opcode 0x0f 0x88 - js Jv: jump near if SF=1. */
FNIEMOP_DEF(iemOp_js_Jv)
{
    IEMOP_MNEMONIC("js Jv");
    IEMOP_HLP_MIN_386();                /* Two-byte Jcc requires a 386 or later. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* Operand size defaults to 64-bit in long mode. */
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_REL_JMP_S16(i16Imm); /* taken */
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();       /* not taken */
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_REL_JMP_S32(i32Imm); /* taken */
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();       /* not taken */
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3600
3601
/** Opcode 0x0f 0x89 - jns Jv: jump near if SF=0. */
FNIEMOP_DEF(iemOp_jns_Jv)
{
    IEMOP_MNEMONIC("jns Jv");
    IEMOP_HLP_MIN_386();                /* Two-byte Jcc requires a 386 or later. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* Operand size defaults to 64-bit in long mode. */
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_ADVANCE_RIP();       /* not taken (condition inverted) */
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm); /* taken */
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_ADVANCE_RIP();       /* not taken (condition inverted) */
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm); /* taken */
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3636
3637
/** Opcode 0x0f 0x8a - jp/jpe Jv: jump near if PF=1. */
FNIEMOP_DEF(iemOp_jp_Jv)
{
    IEMOP_MNEMONIC("jp Jv");
    IEMOP_HLP_MIN_386();                /* Two-byte Jcc requires a 386 or later. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* Operand size defaults to 64-bit in long mode. */
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_REL_JMP_S16(i16Imm); /* taken */
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();       /* not taken */
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_REL_JMP_S32(i32Imm); /* taken */
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();       /* not taken */
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3672
3673
3674/** Opcode 0x0f 0x8b. */
3675FNIEMOP_DEF(iemOp_jnp_Jv)
3676{
3677 IEMOP_MNEMONIC("jo Jv");
3678 IEMOP_HLP_MIN_386();
3679 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3680 if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
3681 {
3682 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3683 IEMOP_HLP_NO_LOCK_PREFIX();
3684
3685 IEM_MC_BEGIN(0, 0);
3686 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3687 IEM_MC_ADVANCE_RIP();
3688 } IEM_MC_ELSE() {
3689 IEM_MC_REL_JMP_S16(i16Imm);
3690 } IEM_MC_ENDIF();
3691 IEM_MC_END();
3692 }
3693 else
3694 {
3695 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3696 IEMOP_HLP_NO_LOCK_PREFIX();
3697
3698 IEM_MC_BEGIN(0, 0);
3699 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3700 IEM_MC_ADVANCE_RIP();
3701 } IEM_MC_ELSE() {
3702 IEM_MC_REL_JMP_S32(i32Imm);
3703 } IEM_MC_ENDIF();
3704 IEM_MC_END();
3705 }
3706 return VINF_SUCCESS;
3707}
3708
3709
/** Opcode 0x0f 0x8c - jl/jnge Jv: jump near if SF != OF. */
FNIEMOP_DEF(iemOp_jl_Jv)
{
    IEMOP_MNEMONIC("jl/jnge Jv");
    IEMOP_HLP_MIN_386();                /* Two-byte Jcc requires a 386 or later. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* Operand size defaults to 64-bit in long mode. */
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm); /* taken */
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();       /* not taken */
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm); /* taken */
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();       /* not taken */
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3744
3745
/** Opcode 0x0f 0x8d - jnl/jge Jv: jump near if SF == OF. */
FNIEMOP_DEF(iemOp_jnl_Jv)
{
    IEMOP_MNEMONIC("jnl/jge Jv");
    IEMOP_HLP_MIN_386();                /* Two-byte Jcc requires a 386 or later. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* Operand size defaults to 64-bit in long mode. */
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();       /* not taken (condition inverted) */
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm); /* taken */
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();       /* not taken (condition inverted) */
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm); /* taken */
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3780
3781
/** Opcode 0x0f 0x8e - jle/jng Jv: jump near if ZF=1 or SF != OF. */
FNIEMOP_DEF(iemOp_jle_Jv)
{
    IEMOP_MNEMONIC("jle/jng Jv");
    IEMOP_HLP_MIN_386();                /* Two-byte Jcc requires a 386 or later. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* Operand size defaults to 64-bit in long mode. */
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm); /* taken */
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();       /* not taken */
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm); /* taken */
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();       /* not taken */
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3816
3817
/** Opcode 0x0f 0x8f - jnle/jg Jv: jump near if ZF=0 and SF == OF. */
FNIEMOP_DEF(iemOp_jnle_Jv)
{
    IEMOP_MNEMONIC("jnle/jg Jv");
    IEMOP_HLP_MIN_386();                /* Two-byte Jcc requires a 386 or later. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* Operand size defaults to 64-bit in long mode. */
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();       /* not taken (condition inverted) */
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm); /* taken */
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();       /* not taken (condition inverted) */
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm); /* taken */
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3852
3853
/** Opcode 0x0f 0x90 - seto Eb: set byte to 1 if OF=1, else 0. */
FNIEMOP_DEF(iemOp_seto_Eb)
{
    IEMOP_MNEMONIC("seto Eb");
    IEMOP_HLP_MIN_386();    /* SETcc requires a 386 or later. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3893
3894
/** Opcode 0x0f 0x91 - setno Eb: set byte to 1 if OF=0, else 0. */
FNIEMOP_DEF(iemOp_setno_Eb)
{
    IEMOP_MNEMONIC("setno Eb");
    IEMOP_HLP_MIN_386();    /* SETcc requires a 386 or later. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3934
3935
/** Opcode 0x0f 0x92 - setc/setb/setnae Eb: set byte to 1 if CF=1, else 0. */
FNIEMOP_DEF(iemOp_setc_Eb)
{
    IEMOP_MNEMONIC("setc Eb");
    IEMOP_HLP_MIN_386();    /* SETcc requires a 386 or later. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3975
3976
/** Opcode 0x0f 0x93 - setnc/setnb/setae Eb: set byte to 1 if CF=0, else 0. */
FNIEMOP_DEF(iemOp_setnc_Eb)
{
    IEMOP_MNEMONIC("setnc Eb");
    IEMOP_HLP_MIN_386();    /* SETcc requires a 386 or later. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4016
4017
/** Opcode 0x0f 0x94 - sete/setz Eb: set byte to 1 if ZF=1, else 0. */
FNIEMOP_DEF(iemOp_sete_Eb)
{
    IEMOP_MNEMONIC("sete Eb");
    IEMOP_HLP_MIN_386();    /* SETcc requires a 386 or later. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4057
4058
/** Opcode 0x0f 0x95 - setne/setnz Eb: set byte to 1 if ZF=0, else 0. */
FNIEMOP_DEF(iemOp_setne_Eb)
{
    IEMOP_MNEMONIC("setne Eb");
    IEMOP_HLP_MIN_386();    /* SETcc requires a 386 or later. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4098
4099
/** Opcode 0x0f 0x96 - setbe/setna Eb: set byte to 1 if CF=1 or ZF=1, else 0. */
FNIEMOP_DEF(iemOp_setbe_Eb)
{
    IEMOP_MNEMONIC("setbe Eb");
    IEMOP_HLP_MIN_386();    /* SETcc requires a 386 or later. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4139
4140
/**
 * Opcode 0x0f 0x97 - setnbe/seta Eb.
 *
 * Stores 1 in the byte r/m operand when both CF and ZF are clear (unsigned
 * above), 0 otherwise.  Inverse of setbe; the stored constants are swapped.
 */
FNIEMOP_DEF(iemOp_setnbe_Eb)
{
    IEMOP_MNEMONIC("setnbe Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4180
4181
/**
 * Opcode 0x0f 0x98 - sets Eb.
 *
 * Stores 1 in the byte r/m operand when SF is set, 0 otherwise.
 */
FNIEMOP_DEF(iemOp_sets_Eb)
{
    IEMOP_MNEMONIC("sets Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4221
4222
/**
 * Opcode 0x0f 0x99 - setns Eb.
 *
 * Stores 1 in the byte r/m operand when SF is clear, 0 otherwise.
 * Inverse of sets; the stored constants are swapped.
 */
FNIEMOP_DEF(iemOp_setns_Eb)
{
    IEMOP_MNEMONIC("setns Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4262
4263
4264/** Opcode 0x0f 0x9a. */
4265FNIEMOP_DEF(iemOp_setp_Eb)
4266{
4267 IEMOP_MNEMONIC("setnp Eb");
4268 IEMOP_HLP_MIN_386();
4269 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4270 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */
4271
4272 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4273 * any way. AMD says it's "unused", whatever that means. We're
4274 * ignoring for now. */
4275 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4276 {
4277 /* register target */
4278 IEM_MC_BEGIN(0, 0);
4279 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4280 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
4281 } IEM_MC_ELSE() {
4282 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
4283 } IEM_MC_ENDIF();
4284 IEM_MC_ADVANCE_RIP();
4285 IEM_MC_END();
4286 }
4287 else
4288 {
4289 /* memory target */
4290 IEM_MC_BEGIN(0, 1);
4291 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4292 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4293 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4294 IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
4295 } IEM_MC_ELSE() {
4296 IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
4297 } IEM_MC_ENDIF();
4298 IEM_MC_ADVANCE_RIP();
4299 IEM_MC_END();
4300 }
4301 return VINF_SUCCESS;
4302}
4303
4304
/**
 * Opcode 0x0f 0x9b - setnp/setpo Eb.
 *
 * Stores 1 in the byte r/m operand when PF is clear, 0 otherwise.
 * Inverse of setp; the stored constants are swapped.
 */
FNIEMOP_DEF(iemOp_setnp_Eb)
{
    IEMOP_MNEMONIC("setnp Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4344
4345
/**
 * Opcode 0x0f 0x9c - setl/setnge Eb.
 *
 * Stores 1 in the byte r/m operand when SF != OF (signed less-than),
 * 0 otherwise.
 */
FNIEMOP_DEF(iemOp_setl_Eb)
{
    IEMOP_MNEMONIC("setl Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4385
4386
/**
 * Opcode 0x0f 0x9d - setnl/setge Eb.
 *
 * Stores 1 in the byte r/m operand when SF == OF (signed
 * greater-or-equal), 0 otherwise.  Inverse of setl; stored constants are
 * swapped.
 */
FNIEMOP_DEF(iemOp_setnl_Eb)
{
    IEMOP_MNEMONIC("setnl Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4426
4427
/**
 * Opcode 0x0f 0x9e - setle/setng Eb.
 *
 * Stores 1 in the byte r/m operand when ZF is set or SF != OF (signed
 * less-or-equal), 0 otherwise.
 */
FNIEMOP_DEF(iemOp_setle_Eb)
{
    IEMOP_MNEMONIC("setle Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4467
4468
/**
 * Opcode 0x0f 0x9f - setnle/setg Eb.
 *
 * Stores 1 in the byte r/m operand when ZF is clear and SF == OF (signed
 * greater-than), 0 otherwise.  Inverse of setle; stored constants are
 * swapped.
 */
FNIEMOP_DEF(iemOp_setnle_Eb)
{
    IEMOP_MNEMONIC("setnle Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4508
4509
4510/**
4511 * Common 'push segment-register' helper.
4512 */
4513FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
4514{
4515 IEMOP_HLP_NO_LOCK_PREFIX();
4516 if (iReg < X86_SREG_FS)
4517 IEMOP_HLP_NO_64BIT();
4518 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4519
4520 switch (pIemCpu->enmEffOpSize)
4521 {
4522 case IEMMODE_16BIT:
4523 IEM_MC_BEGIN(0, 1);
4524 IEM_MC_LOCAL(uint16_t, u16Value);
4525 IEM_MC_FETCH_SREG_U16(u16Value, iReg);
4526 IEM_MC_PUSH_U16(u16Value);
4527 IEM_MC_ADVANCE_RIP();
4528 IEM_MC_END();
4529 break;
4530
4531 case IEMMODE_32BIT:
4532 IEM_MC_BEGIN(0, 1);
4533 IEM_MC_LOCAL(uint32_t, u32Value);
4534 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
4535 IEM_MC_PUSH_U32_SREG(u32Value);
4536 IEM_MC_ADVANCE_RIP();
4537 IEM_MC_END();
4538 break;
4539
4540 case IEMMODE_64BIT:
4541 IEM_MC_BEGIN(0, 1);
4542 IEM_MC_LOCAL(uint64_t, u64Value);
4543 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
4544 IEM_MC_PUSH_U64(u64Value);
4545 IEM_MC_ADVANCE_RIP();
4546 IEM_MC_END();
4547 break;
4548 }
4549
4550 return VINF_SUCCESS;
4551}
4552
4553
/** Opcode 0x0f 0xa0 - push fs. Delegates to the common sreg push helper. */
FNIEMOP_DEF(iemOp_push_fs)
{
    IEMOP_MNEMONIC("push fs");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_NO_LOCK_PREFIX();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
}
4562
4563
/** Opcode 0x0f 0xa1 - pop fs. Deferred to the C implementation (segment loads can fault/change mode). */
FNIEMOP_DEF(iemOp_pop_fs)
{
    IEMOP_MNEMONIC("pop fs");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pIemCpu->enmEffOpSize);
}
4572
4573
/** Opcode 0x0f 0xa2 - cpuid. Deferred to the C implementation. */
FNIEMOP_DEF(iemOp_cpuid)
{
    IEMOP_MNEMONIC("cpuid");
    IEMOP_HLP_MIN_486(); /* not all 486es. */
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
}
4582
4583
4584/**
4585 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
4586 * iemOp_bts_Ev_Gv.
4587 */
4588FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
4589{
4590 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4591 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
4592
4593 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4594 {
4595 /* register destination. */
4596 IEMOP_HLP_NO_LOCK_PREFIX();
4597 switch (pIemCpu->enmEffOpSize)
4598 {
4599 case IEMMODE_16BIT:
4600 IEM_MC_BEGIN(3, 0);
4601 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
4602 IEM_MC_ARG(uint16_t, u16Src, 1);
4603 IEM_MC_ARG(uint32_t *, pEFlags, 2);
4604
4605 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4606 IEM_MC_AND_LOCAL_U16(u16Src, 0xf);
4607 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
4608 IEM_MC_REF_EFLAGS(pEFlags);
4609 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
4610
4611 IEM_MC_ADVANCE_RIP();
4612 IEM_MC_END();
4613 return VINF_SUCCESS;
4614
4615 case IEMMODE_32BIT:
4616 IEM_MC_BEGIN(3, 0);
4617 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
4618 IEM_MC_ARG(uint32_t, u32Src, 1);
4619 IEM_MC_ARG(uint32_t *, pEFlags, 2);
4620
4621 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4622 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
4623 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
4624 IEM_MC_REF_EFLAGS(pEFlags);
4625 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
4626
4627 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
4628 IEM_MC_ADVANCE_RIP();
4629 IEM_MC_END();
4630 return VINF_SUCCESS;
4631
4632 case IEMMODE_64BIT:
4633 IEM_MC_BEGIN(3, 0);
4634 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
4635 IEM_MC_ARG(uint64_t, u64Src, 1);
4636 IEM_MC_ARG(uint32_t *, pEFlags, 2);
4637
4638 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4639 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
4640 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
4641 IEM_MC_REF_EFLAGS(pEFlags);
4642 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
4643
4644 IEM_MC_ADVANCE_RIP();
4645 IEM_MC_END();
4646 return VINF_SUCCESS;
4647
4648 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4649 }
4650 }
4651 else
4652 {
4653 /* memory destination. */
4654
4655 uint32_t fAccess;
4656 if (pImpl->pfnLockedU16)
4657 fAccess = IEM_ACCESS_DATA_RW;
4658 else /* BT */
4659 {
4660 IEMOP_HLP_NO_LOCK_PREFIX();
4661 fAccess = IEM_ACCESS_DATA_R;
4662 }
4663
4664 NOREF(fAccess);
4665
4666 /** @todo test negative bit offsets! */
4667 switch (pIemCpu->enmEffOpSize)
4668 {
4669 case IEMMODE_16BIT:
4670 IEM_MC_BEGIN(3, 2);
4671 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
4672 IEM_MC_ARG(uint16_t, u16Src, 1);
4673 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
4674 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4675 IEM_MC_LOCAL(int16_t, i16AddrAdj);
4676
4677 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4678 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4679 IEM_MC_ASSIGN(i16AddrAdj, u16Src);
4680 IEM_MC_AND_ARG_U16(u16Src, 0x0f);
4681 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
4682 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 1);
4683 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
4684 IEM_MC_FETCH_EFLAGS(EFlags);
4685
4686 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
4687 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
4688 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
4689 else
4690 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
4691 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
4692
4693 IEM_MC_COMMIT_EFLAGS(EFlags);
4694 IEM_MC_ADVANCE_RIP();
4695 IEM_MC_END();
4696 return VINF_SUCCESS;
4697
4698 case IEMMODE_32BIT:
4699 IEM_MC_BEGIN(3, 2);
4700 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
4701 IEM_MC_ARG(uint32_t, u32Src, 1);
4702 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
4703 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4704 IEM_MC_LOCAL(int32_t, i32AddrAdj);
4705
4706 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4707 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4708 IEM_MC_ASSIGN(i32AddrAdj, u32Src);
4709 IEM_MC_AND_ARG_U32(u32Src, 0x1f);
4710 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
4711 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
4712 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
4713 IEM_MC_FETCH_EFLAGS(EFlags);
4714
4715 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
4716 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
4717 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
4718 else
4719 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
4720 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
4721
4722 IEM_MC_COMMIT_EFLAGS(EFlags);
4723 IEM_MC_ADVANCE_RIP();
4724 IEM_MC_END();
4725 return VINF_SUCCESS;
4726
4727 case IEMMODE_64BIT:
4728 IEM_MC_BEGIN(3, 2);
4729 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
4730 IEM_MC_ARG(uint64_t, u64Src, 1);
4731 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
4732 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4733 IEM_MC_LOCAL(int64_t, i64AddrAdj);
4734
4735 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4736 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4737 IEM_MC_ASSIGN(i64AddrAdj, u64Src);
4738 IEM_MC_AND_ARG_U64(u64Src, 0x3f);
4739 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
4740 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
4741 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
4742 IEM_MC_FETCH_EFLAGS(EFlags);
4743
4744 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
4745 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
4746 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
4747 else
4748 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
4749 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
4750
4751 IEM_MC_COMMIT_EFLAGS(EFlags);
4752 IEM_MC_ADVANCE_RIP();
4753 IEM_MC_END();
4754 return VINF_SUCCESS;
4755
4756 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4757 }
4758 }
4759}
4760
4761
4762/** Opcode 0x0f 0xa3. */
4763FNIEMOP_DEF(iemOp_bt_Ev_Gv)
4764{
4765 IEMOP_MNEMONIC("bt Gv,Gv");
4766 IEMOP_HLP_MIN_386();
4767 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
4768}
4769
4770
4771/**
4772 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
4773 */
4774FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
4775{
4776 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4777 IEMOP_HLP_NO_LOCK_PREFIX();
4778 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
4779
4780 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4781 {
4782 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
4783 IEMOP_HLP_NO_LOCK_PREFIX();
4784
4785 switch (pIemCpu->enmEffOpSize)
4786 {
4787 case IEMMODE_16BIT:
4788 IEM_MC_BEGIN(4, 0);
4789 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
4790 IEM_MC_ARG(uint16_t, u16Src, 1);
4791 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
4792 IEM_MC_ARG(uint32_t *, pEFlags, 3);
4793
4794 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4795 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
4796 IEM_MC_REF_EFLAGS(pEFlags);
4797 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
4798
4799 IEM_MC_ADVANCE_RIP();
4800 IEM_MC_END();
4801 return VINF_SUCCESS;
4802
4803 case IEMMODE_32BIT:
4804 IEM_MC_BEGIN(4, 0);
4805 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
4806 IEM_MC_ARG(uint32_t, u32Src, 1);
4807 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
4808 IEM_MC_ARG(uint32_t *, pEFlags, 3);
4809
4810 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4811 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
4812 IEM_MC_REF_EFLAGS(pEFlags);
4813 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
4814
4815 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
4816 IEM_MC_ADVANCE_RIP();
4817 IEM_MC_END();
4818 return VINF_SUCCESS;
4819
4820 case IEMMODE_64BIT:
4821 IEM_MC_BEGIN(4, 0);
4822 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
4823 IEM_MC_ARG(uint64_t, u64Src, 1);
4824 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
4825 IEM_MC_ARG(uint32_t *, pEFlags, 3);
4826
4827 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4828 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
4829 IEM_MC_REF_EFLAGS(pEFlags);
4830 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
4831
4832 IEM_MC_ADVANCE_RIP();
4833 IEM_MC_END();
4834 return VINF_SUCCESS;
4835
4836 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4837 }
4838 }
4839 else
4840 {
4841 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */
4842
4843 switch (pIemCpu->enmEffOpSize)
4844 {
4845 case IEMMODE_16BIT:
4846 IEM_MC_BEGIN(4, 2);
4847 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
4848 IEM_MC_ARG(uint16_t, u16Src, 1);
4849 IEM_MC_ARG(uint8_t, cShiftArg, 2);
4850 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
4851 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4852
4853 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
4854 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
4855 IEM_MC_ASSIGN(cShiftArg, cShift);
4856 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4857 IEM_MC_FETCH_EFLAGS(EFlags);
4858 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
4859 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
4860
4861 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
4862 IEM_MC_COMMIT_EFLAGS(EFlags);
4863 IEM_MC_ADVANCE_RIP();
4864 IEM_MC_END();
4865 return VINF_SUCCESS;
4866
4867 case IEMMODE_32BIT:
4868 IEM_MC_BEGIN(4, 2);
4869 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
4870 IEM_MC_ARG(uint32_t, u32Src, 1);
4871 IEM_MC_ARG(uint8_t, cShiftArg, 2);
4872 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
4873 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4874
4875 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
4876 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
4877 IEM_MC_ASSIGN(cShiftArg, cShift);
4878 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4879 IEM_MC_FETCH_EFLAGS(EFlags);
4880 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
4881 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
4882
4883 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
4884 IEM_MC_COMMIT_EFLAGS(EFlags);
4885 IEM_MC_ADVANCE_RIP();
4886 IEM_MC_END();
4887 return VINF_SUCCESS;
4888
4889 case IEMMODE_64BIT:
4890 IEM_MC_BEGIN(4, 2);
4891 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
4892 IEM_MC_ARG(uint64_t, u64Src, 1);
4893 IEM_MC_ARG(uint8_t, cShiftArg, 2);
4894 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
4895 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4896
4897 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
4898 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
4899 IEM_MC_ASSIGN(cShiftArg, cShift);
4900 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4901 IEM_MC_FETCH_EFLAGS(EFlags);
4902 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
4903 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
4904
4905 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
4906 IEM_MC_COMMIT_EFLAGS(EFlags);
4907 IEM_MC_ADVANCE_RIP();
4908 IEM_MC_END();
4909 return VINF_SUCCESS;
4910
4911 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4912 }
4913 }
4914}
4915
4916
4917/**
4918 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
4919 */
4920FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
4921{
4922 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4923 IEMOP_HLP_NO_LOCK_PREFIX();
4924 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
4925
4926 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4927 {
4928 IEMOP_HLP_NO_LOCK_PREFIX();
4929
4930 switch (pIemCpu->enmEffOpSize)
4931 {
4932 case IEMMODE_16BIT:
4933 IEM_MC_BEGIN(4, 0);
4934 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
4935 IEM_MC_ARG(uint16_t, u16Src, 1);
4936 IEM_MC_ARG(uint8_t, cShiftArg, 2);
4937 IEM_MC_ARG(uint32_t *, pEFlags, 3);
4938
4939 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4940 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
4941 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
4942 IEM_MC_REF_EFLAGS(pEFlags);
4943 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
4944
4945 IEM_MC_ADVANCE_RIP();
4946 IEM_MC_END();
4947 return VINF_SUCCESS;
4948
4949 case IEMMODE_32BIT:
4950 IEM_MC_BEGIN(4, 0);
4951 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
4952 IEM_MC_ARG(uint32_t, u32Src, 1);
4953 IEM_MC_ARG(uint8_t, cShiftArg, 2);
4954 IEM_MC_ARG(uint32_t *, pEFlags, 3);
4955
4956 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4957 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
4958 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
4959 IEM_MC_REF_EFLAGS(pEFlags);
4960 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
4961
4962 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
4963 IEM_MC_ADVANCE_RIP();
4964 IEM_MC_END();
4965 return VINF_SUCCESS;
4966
4967 case IEMMODE_64BIT:
4968 IEM_MC_BEGIN(4, 0);
4969 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
4970 IEM_MC_ARG(uint64_t, u64Src, 1);
4971 IEM_MC_ARG(uint8_t, cShiftArg, 2);
4972 IEM_MC_ARG(uint32_t *, pEFlags, 3);
4973
4974 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4975 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
4976 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
4977 IEM_MC_REF_EFLAGS(pEFlags);
4978 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
4979
4980 IEM_MC_ADVANCE_RIP();
4981 IEM_MC_END();
4982 return VINF_SUCCESS;
4983
4984 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4985 }
4986 }
4987 else
4988 {
4989 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */
4990
4991 switch (pIemCpu->enmEffOpSize)
4992 {
4993 case IEMMODE_16BIT:
4994 IEM_MC_BEGIN(4, 2);
4995 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
4996 IEM_MC_ARG(uint16_t, u16Src, 1);
4997 IEM_MC_ARG(uint8_t, cShiftArg, 2);
4998 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
4999 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5000
5001 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5002 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
5003 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5004 IEM_MC_FETCH_EFLAGS(EFlags);
5005 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
5006 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5007
5008 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
5009 IEM_MC_COMMIT_EFLAGS(EFlags);
5010 IEM_MC_ADVANCE_RIP();
5011 IEM_MC_END();
5012 return VINF_SUCCESS;
5013
5014 case IEMMODE_32BIT:
5015 IEM_MC_BEGIN(4, 2);
5016 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5017 IEM_MC_ARG(uint32_t, u32Src, 1);
5018 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5019 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5020 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5021
5022 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5023 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
5024 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5025 IEM_MC_FETCH_EFLAGS(EFlags);
5026 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
5027 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5028
5029 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
5030 IEM_MC_COMMIT_EFLAGS(EFlags);
5031 IEM_MC_ADVANCE_RIP();
5032 IEM_MC_END();
5033 return VINF_SUCCESS;
5034
5035 case IEMMODE_64BIT:
5036 IEM_MC_BEGIN(4, 2);
5037 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5038 IEM_MC_ARG(uint64_t, u64Src, 1);
5039 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5040 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5041 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5042
5043 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5044 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
5045 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5046 IEM_MC_FETCH_EFLAGS(EFlags);
5047 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
5048 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5049
5050 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
5051 IEM_MC_COMMIT_EFLAGS(EFlags);
5052 IEM_MC_ADVANCE_RIP();
5053 IEM_MC_END();
5054 return VINF_SUCCESS;
5055
5056 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5057 }
5058 }
5059}
5060
5061
5062
5063/** Opcode 0x0f 0xa4. SHLD Ev,Gv,Ib - double precision shift left, immediate count. */
5064FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
5065{
5066 IEMOP_MNEMONIC("shld Ev,Gv,Ib");
5067 IEMOP_HLP_MIN_386(); /* SHLD first appeared on the 386. */
5068 /* Defer to the common Ib-count shld/shrd worker with the SHLD implementation table. */
5069 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shld);
5070}
5070
5071
5072/** Opcode 0x0f 0xa5. SHLD Ev,Gv,CL - double precision shift left, count in CL. */
5073FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
5074{
5075 IEMOP_MNEMONIC("shld Ev,Gv,CL");
5076 IEMOP_HLP_MIN_386(); /* SHLD first appeared on the 386. */
5077 /* Defer to the common CL-count shld/shrd worker with the SHLD implementation table. */
5078 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shld);
5079}
5079
5080
5081/** Opcode 0x0f 0xa8. PUSH GS - push the GS segment selector. */
5082FNIEMOP_DEF(iemOp_push_gs)
5083{
5084 IEMOP_MNEMONIC("push gs");
5085 IEMOP_HLP_MIN_386(); /* GS only exists on the 386 and later. */
5086 IEMOP_HLP_NO_LOCK_PREFIX();
5087 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
5088}
5089
5090
5091/** Opcode 0x0f 0xa9. POP GS - pop the GS segment selector. */
5092FNIEMOP_DEF(iemOp_pop_gs)
5093{
5094 IEMOP_MNEMONIC("pop gs");
5095 IEMOP_HLP_MIN_386(); /* GS only exists on the 386 and later. */
5096 IEMOP_HLP_NO_LOCK_PREFIX();
5097 /* Segment register loads may fault / have side effects, so this is a C-impl call. */
5098 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pIemCpu->enmEffOpSize);
5099}
5099
5100
5101/** Opcode 0x0f 0xaa. RSM - resume from system management mode. Not implemented yet. */
5102FNIEMOP_STUB(iemOp_rsm);
5103//IEMOP_HLP_MIN_386();
5104
5105
5106/** Opcode 0x0f 0xab. BTS Ev,Gv - bit test and set. */
5107FNIEMOP_DEF(iemOp_bts_Ev_Gv)
5108{
5109 IEMOP_MNEMONIC("bts Ev,Gv");
5110 IEMOP_HLP_MIN_386(); /* Bit test instructions first appeared on the 386. */
5111 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
5112}
5113
5114
5115/** Opcode 0x0f 0xac. SHRD Ev,Gv,Ib - double precision shift right, immediate count. */
5116FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
5117{
5118 IEMOP_MNEMONIC("shrd Ev,Gv,Ib");
5119 IEMOP_HLP_MIN_386(); /* SHRD first appeared on the 386. */
5120 /* Same worker as SHLD Ib, just with the SHRD implementation table. */
5121 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shrd);
5122}
5122
5123
5124/** Opcode 0x0f 0xad. SHRD Ev,Gv,CL - double precision shift right, count in CL. */
5125FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
5126{
5127 IEMOP_MNEMONIC("shrd Ev,Gv,CL");
5128 IEMOP_HLP_MIN_386(); /* SHRD first appeared on the 386. */
5129 /* Same worker as SHLD CL, just with the SHRD implementation table. */
5130 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shrd);
5131}
5131
5132
5133/** Opcode 0x0f 0xae mem/0. FXSAVE m512 - save x87/MMX/SSE state to memory. */
5134FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
5135{
5136 IEMOP_MNEMONIC("fxsave m512");
5137 /* #UD when the guest CPU profile doesn't advertise FXSR. */
5138 if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fFxSaveRstor)
5139 return IEMOP_RAISE_INVALID_OPCODE();
5140
5141 IEM_MC_BEGIN(3, 1);
5142 IEM_MC_ARG(uint8_t, iEffSeg, 0);
5143 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
5144 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 2);
5145 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5146 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5147 IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
5148 /* The heavy lifting (alignment checks, state formatting) lives in the C-impl. */
5149 IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
5150 IEM_MC_END();
5151 return VINF_SUCCESS;
5152}
5151
5152
5153/** Opcode 0x0f 0xae mem/1. FXRSTOR m512 - restore x87/MMX/SSE state from memory. */
5154FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
5155{
5156 IEMOP_MNEMONIC("fxrstor m512");
5157 /* #UD when the guest CPU profile doesn't advertise FXSR. */
5158 if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fFxSaveRstor)
5159 return IEMOP_RAISE_INVALID_OPCODE();
5160
5161 IEM_MC_BEGIN(3, 1);
5162 IEM_MC_ARG(uint8_t, iEffSeg, 0);
5163 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
5164 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 2);
5165 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5166 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5167 IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
5168 /* The heavy lifting (alignment checks, state loading) lives in the C-impl. */
5169 IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
5170 IEM_MC_END();
5171 return VINF_SUCCESS;
5172}
5171
5172
5173/** Opcode 0x0f 0xae mem/2. LDMXCSR - not implemented yet (stub asserts). */
5174FNIEMOP_STUB_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm);
5175
5176/** Opcode 0x0f 0xae mem/3. STMXCSR - not implemented yet (stub asserts). */
5177FNIEMOP_STUB_1(iemOp_Grp15_stmxcsr, uint8_t, bRm);
5178
5179/** Opcode 0x0f 0xae mem/4. XSAVE - decodes to \#UD for now. */
5180FNIEMOP_UD_STUB_1(iemOp_Grp15_xsave, uint8_t, bRm);
5181
5182/** Opcode 0x0f 0xae mem/5. XRSTOR - decodes to \#UD for now. */
5183FNIEMOP_UD_STUB_1(iemOp_Grp15_xrstor, uint8_t, bRm);
5184
5185/** Opcode 0x0f 0xae mem/6. XSAVEOPT - decodes to \#UD for now. */
5186FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);
5187
5188/** Opcode 0x0f 0xae mem/7. CLFLUSH - not implemented yet (stub asserts). */
5189FNIEMOP_STUB_1(iemOp_Grp15_clflush, uint8_t, bRm);
5190
5191
5192/** Opcode 0x0f 0xae 11b/5. LFENCE - load fence. */
5193FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
5194{
5195 IEMOP_MNEMONIC("lfence");
5196 IEMOP_HLP_NO_LOCK_PREFIX();
5197 /* LFENCE requires SSE2 on the guest CPU profile. */
5198 if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fSse2)
5199 return IEMOP_RAISE_INVALID_OPCODE();
5200
5201 IEM_MC_BEGIN(0, 0);
5202 /* Use the real lfence when the host supports it, otherwise a generic fence substitute. */
5203 if (IEM_GET_HOST_CPU_FEATURES(pIemCpu)->fSse2)
5204 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
5205 else
5206 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
5207 IEM_MC_ADVANCE_RIP();
5208 IEM_MC_END();
5209 return VINF_SUCCESS;
5210}
5209
5210
5211/** Opcode 0x0f 0xae 11b/6. MFENCE - full memory fence. */
5212FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
5213{
5214 IEMOP_MNEMONIC("mfence");
5215 IEMOP_HLP_NO_LOCK_PREFIX();
5216 /* MFENCE requires SSE2 on the guest CPU profile. */
5217 if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fSse2)
5218 return IEMOP_RAISE_INVALID_OPCODE();
5219
5220 IEM_MC_BEGIN(0, 0);
5221 /* Use the real mfence when the host supports it, otherwise a generic fence substitute. */
5222 if (IEM_GET_HOST_CPU_FEATURES(pIemCpu)->fSse2)
5223 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
5224 else
5225 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
5226 IEM_MC_ADVANCE_RIP();
5227 IEM_MC_END();
5228 return VINF_SUCCESS;
5229}
5228
5229
5230/** Opcode 0x0f 0xae 11b/7. SFENCE - store fence.
5231 * NOTE(review): this checks fSse2 like lfence/mfence, but on real CPUs SFENCE
5232 * was introduced with SSE (not SSE2) - confirm whether this is intentional. */
5233FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
5234{
5235 IEMOP_MNEMONIC("sfence");
5236 IEMOP_HLP_NO_LOCK_PREFIX();
5237 if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fSse2)
5238 return IEMOP_RAISE_INVALID_OPCODE();
5239
5240 IEM_MC_BEGIN(0, 0);
5241 /* Use the real sfence when the host supports it, otherwise a generic fence substitute. */
5242 if (IEM_GET_HOST_CPU_FEATURES(pIemCpu)->fSse2)
5243 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
5244 else
5245 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
5246 IEM_MC_ADVANCE_RIP();
5247 IEM_MC_END();
5248 return VINF_SUCCESS;
5249}
5247
5248
5249/** Opcode 0xf3 0x0f 0xae 11b/0. RDFSBASE - decodes to \#UD for now. */
5250FNIEMOP_UD_STUB_1(iemOp_Grp15_rdfsbase, uint8_t, bRm);
5251
5252/** Opcode 0xf3 0x0f 0xae 11b/1. RDGSBASE - decodes to \#UD for now. */
5253FNIEMOP_UD_STUB_1(iemOp_Grp15_rdgsbase, uint8_t, bRm);
5254
5255/** Opcode 0xf3 0x0f 0xae 11b/2. WRFSBASE - decodes to \#UD for now. */
5256FNIEMOP_UD_STUB_1(iemOp_Grp15_wrfsbase, uint8_t, bRm);
5257
5258/** Opcode 0xf3 0x0f 0xae 11b/3. WRGSBASE - decodes to \#UD for now. */
5259FNIEMOP_UD_STUB_1(iemOp_Grp15_wrgsbase, uint8_t, bRm);
5260
5261
5262/** Opcode 0x0f 0xae. Group 15 dispatcher: routes on mod (mem vs reg), the
5263 * reg field of the ModR/M byte, and (for the register forms) the prefixes. */
5264FNIEMOP_DEF(iemOp_Grp15)
5265{
5266 IEMOP_HLP_MIN_586(); /* Not entirely accurate nor needed, but useful for debugging 286 code. */
5267 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5268 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
5269 {
5270 /* Memory forms: fxsave/fxrstor/ldmxcsr/stmxcsr/xsave/xrstor/xsaveopt/clflush. */
5271 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
5272 {
5273 case 0: return FNIEMOP_CALL_1(iemOp_Grp15_fxsave, bRm);
5274 case 1: return FNIEMOP_CALL_1(iemOp_Grp15_fxrstor, bRm);
5275 case 2: return FNIEMOP_CALL_1(iemOp_Grp15_ldmxcsr, bRm);
5276 case 3: return FNIEMOP_CALL_1(iemOp_Grp15_stmxcsr, bRm);
5277 case 4: return FNIEMOP_CALL_1(iemOp_Grp15_xsave, bRm);
5278 case 5: return FNIEMOP_CALL_1(iemOp_Grp15_xrstor, bRm);
5279 case 6: return FNIEMOP_CALL_1(iemOp_Grp15_xsaveopt,bRm);
5280 case 7: return FNIEMOP_CALL_1(iemOp_Grp15_clflush, bRm);
5281 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5282 }
5283 }
5284 else
5285 {
5286 /* Register forms: meaning depends on the repeat/size/lock prefixes. */
5287 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_LOCK))
5288 {
5289 case 0: /* No prefix: only the fence instructions are valid. */
5290 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
5291 {
5292 case 0: return IEMOP_RAISE_INVALID_OPCODE();
5293 case 1: return IEMOP_RAISE_INVALID_OPCODE();
5294 case 2: return IEMOP_RAISE_INVALID_OPCODE();
5295 case 3: return IEMOP_RAISE_INVALID_OPCODE();
5296 case 4: return IEMOP_RAISE_INVALID_OPCODE();
5297 case 5: return FNIEMOP_CALL_1(iemOp_Grp15_lfence, bRm);
5298 case 6: return FNIEMOP_CALL_1(iemOp_Grp15_mfence, bRm);
5299 case 7: return FNIEMOP_CALL_1(iemOp_Grp15_sfence, bRm);
5300 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5301 }
5302 break;
5303
5304 case IEM_OP_PRF_REPZ: /* F3 prefix: rd/wr fs/gs base instructions. */
5305 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
5306 {
5307 case 0: return FNIEMOP_CALL_1(iemOp_Grp15_rdfsbase, bRm);
5308 case 1: return FNIEMOP_CALL_1(iemOp_Grp15_rdgsbase, bRm);
5309 case 2: return FNIEMOP_CALL_1(iemOp_Grp15_wrfsbase, bRm);
5310 case 3: return FNIEMOP_CALL_1(iemOp_Grp15_wrgsbase, bRm);
5311 case 4: return IEMOP_RAISE_INVALID_OPCODE();
5312 case 5: return IEMOP_RAISE_INVALID_OPCODE();
5313 case 6: return IEMOP_RAISE_INVALID_OPCODE();
5314 case 7: return IEMOP_RAISE_INVALID_OPCODE();
5315 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5316 }
5317 break;
5318
5319 default: /* Any other prefix combination is undefined. */
5320 return IEMOP_RAISE_INVALID_OPCODE();
5321 }
5322 }
5323}
5321
5322
5323/** Opcode 0x0f 0xaf. IMUL Gv,Ev - two operand signed multiply. */
5324FNIEMOP_DEF(iemOp_imul_Gv_Ev)
5325{
5326 IEMOP_MNEMONIC("imul Gv,Ev");
5327 IEMOP_HLP_MIN_386(); /* The two-operand IMUL form first appeared on the 386. */
5328 /* SF/ZF/AF/PF are architecturally undefined after IMUL; tell the verifier so. */
5329 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
5330 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_imul_two);
5331}
5331
5332
5333/** Opcode 0x0f 0xb0. CMPXCHG Eb,Gb - compare AL with Eb; if equal store Gb in
5334 * Eb, otherwise load Eb into AL. Honours the LOCK prefix for the memory form. */
5335FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
5336{
5337 IEMOP_MNEMONIC("cmpxchg Eb,Gb");
5338 IEMOP_HLP_MIN_486(); /* CMPXCHG first appeared on the 486. */
5339 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5340
5341 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5342 {
5343 /* Register destination: operate directly on the register references. */
5344 IEMOP_HLP_DONE_DECODING();
5345 IEM_MC_BEGIN(4, 0);
5346 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
5347 IEM_MC_ARG(uint8_t *, pu8Al, 1);
5348 IEM_MC_ARG(uint8_t, u8Src, 2);
5349 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5350
5351 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
5352 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
5353 IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
5354 IEM_MC_REF_EFLAGS(pEFlags);
5355 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
5356 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
5357 else
5358 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
5359
5360 IEM_MC_ADVANCE_RIP();
5361 IEM_MC_END();
5362 }
5363 else
5364 {
5365 /* Memory destination: map the byte RW, use a local for AL and commit on success. */
5366 IEM_MC_BEGIN(4, 3);
5367 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
5368 IEM_MC_ARG(uint8_t *, pu8Al, 1);
5369 IEM_MC_ARG(uint8_t, u8Src, 2);
5370 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5371 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5372 IEM_MC_LOCAL(uint8_t, u8Al);
5373
5374 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5375 IEMOP_HLP_DONE_DECODING();
5376 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
5377 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
5378 IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
5379 IEM_MC_FETCH_EFLAGS(EFlags);
5380 IEM_MC_REF_LOCAL(pu8Al, u8Al);
5381 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
5382 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
5383 else
5384 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
5385
5386 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
5387 IEM_MC_COMMIT_EFLAGS(EFlags);
5388 /* Write back the (possibly updated) AL local to the register. */
5389 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al);
5390 IEM_MC_ADVANCE_RIP();
5391 IEM_MC_END();
5392 }
5393 return VINF_SUCCESS;
5394}
5391
5392/** Opcode 0x0f 0xb1. CMPXCHG Ev,Gv - compare rAX with Ev; if equal store Gv
5393 * in Ev, otherwise load Ev into rAX. 16/32/64-bit operand sizes; honours LOCK
5394 * for the memory form. */
5395FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
5396{
5397 IEMOP_MNEMONIC("cmpxchg Ev,Gv");
5398 IEMOP_HLP_MIN_486(); /* CMPXCHG first appeared on the 486. */
5399 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5400
5401 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5402 {
5403 /* Register destination. */
5404 IEMOP_HLP_DONE_DECODING();
5405 switch (pIemCpu->enmEffOpSize)
5406 {
5407 case IEMMODE_16BIT:
5408 IEM_MC_BEGIN(4, 0);
5409 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5410 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
5411 IEM_MC_ARG(uint16_t, u16Src, 2);
5412 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5413
5414 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
5415 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
5416 IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
5417 IEM_MC_REF_EFLAGS(pEFlags);
5418 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
5419 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
5420 else
5421 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
5422
5423 IEM_MC_ADVANCE_RIP();
5424 IEM_MC_END();
5425 return VINF_SUCCESS;
5426
5427 case IEMMODE_32BIT:
5428 IEM_MC_BEGIN(4, 0);
5429 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5430 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
5431 IEM_MC_ARG(uint32_t, u32Src, 2);
5432 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5433
5434 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
5435 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
5436 IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
5437 IEM_MC_REF_EFLAGS(pEFlags);
5438 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
5439 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
5440 else
5441 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
5442
5443 /* 32-bit register writes clear the high halves in long mode. */
5444 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
5445 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5446 IEM_MC_ADVANCE_RIP();
5447 IEM_MC_END();
5448 return VINF_SUCCESS;
5449
5450 case IEMMODE_64BIT:
5451 IEM_MC_BEGIN(4, 0);
5452 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5453 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
5454#ifdef RT_ARCH_X86
5455 /* 32-bit hosts pass the 64-bit source by reference. */
5456 IEM_MC_ARG(uint64_t *, pu64Src, 2);
5457#else
5458 IEM_MC_ARG(uint64_t, u64Src, 2);
5459#endif
5460 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5461
5462 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
5463 IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
5464 IEM_MC_REF_EFLAGS(pEFlags);
5465#ifdef RT_ARCH_X86
5466 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
5467 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
5468 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
5469 else
5470 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
5471#else
5472 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
5473 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
5474 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
5475 else
5476 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
5477#endif
5478
5479 IEM_MC_ADVANCE_RIP();
5480 IEM_MC_END();
5481 return VINF_SUCCESS;
5482
5483 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5484 }
5485 }
5486 else
5487 {
5488 /* Memory destination: map RW, keep rAX in a local and write it back afterwards. */
5489 switch (pIemCpu->enmEffOpSize)
5490 {
5491 case IEMMODE_16BIT:
5492 IEM_MC_BEGIN(4, 3);
5493 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5494 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
5495 IEM_MC_ARG(uint16_t, u16Src, 2);
5496 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5497 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5498 IEM_MC_LOCAL(uint16_t, u16Ax);
5499
5500 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5501 IEMOP_HLP_DONE_DECODING();
5502 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
5503 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
5504 IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
5505 IEM_MC_FETCH_EFLAGS(EFlags);
5506 IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
5507 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
5508 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
5509 else
5510 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
5511
5512 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
5513 IEM_MC_COMMIT_EFLAGS(EFlags);
5514 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
5515 IEM_MC_ADVANCE_RIP();
5516 IEM_MC_END();
5517 return VINF_SUCCESS;
5518
5519 case IEMMODE_32BIT:
5520 IEM_MC_BEGIN(4, 3);
5521 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5522 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
5523 IEM_MC_ARG(uint32_t, u32Src, 2);
5524 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5525 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5526 IEM_MC_LOCAL(uint32_t, u32Eax);
5527
5528 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5529 IEMOP_HLP_DONE_DECODING();
5530 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
5531 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
5532 IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
5533 IEM_MC_FETCH_EFLAGS(EFlags);
5534 IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
5535 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
5536 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
5537 else
5538 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
5539
5540 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
5541 IEM_MC_COMMIT_EFLAGS(EFlags);
5542 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
5543 IEM_MC_ADVANCE_RIP();
5544 IEM_MC_END();
5545 return VINF_SUCCESS;
5546
5547 case IEMMODE_64BIT:
5548 IEM_MC_BEGIN(4, 3);
5549 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5550 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
5551#ifdef RT_ARCH_X86
5552 /* 32-bit hosts pass the 64-bit source by reference. */
5553 IEM_MC_ARG(uint64_t *, pu64Src, 2);
5554#else
5555 IEM_MC_ARG(uint64_t, u64Src, 2);
5556#endif
5557 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5558 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5559 IEM_MC_LOCAL(uint64_t, u64Rax);
5560
5561 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5562 IEMOP_HLP_DONE_DECODING();
5563 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
5564 IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
5565 IEM_MC_FETCH_EFLAGS(EFlags);
5566 IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
5567#ifdef RT_ARCH_X86
5568 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
5569 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
5570 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
5571 else
5572 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
5573#else
5574 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
5575 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
5576 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
5577 else
5578 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
5579#endif
5580
5581 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
5582 IEM_MC_COMMIT_EFLAGS(EFlags);
5583 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
5584 IEM_MC_ADVANCE_RIP();
5585 IEM_MC_END();
5586 return VINF_SUCCESS;
5587
5588 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5589 }
5590 }
5591}
5585
5586
5587/**
5588 * Common worker for LDS/LES/LSS/LFS/LGS: loads a far pointer (offset + 16-bit
5589 * selector) from memory into a general register and the given segment register.
5590 *
5591 * @param iSegReg The segment register to load (X86_SREG_XXX).
5592 * @param bRm The ModR/M byte (must denote a memory operand).
5593 */
5594FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
5595{
5596 Assert((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)); /* Caller checks this */
5597 uint8_t const iGReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg;
5598
5599 switch (pIemCpu->enmEffOpSize)
5600 {
5601 case IEMMODE_16BIT:
5602 /* 16-bit offset followed by the selector at +2. */
5603 IEM_MC_BEGIN(5, 1);
5604 IEM_MC_ARG(uint16_t, uSel, 0);
5605 IEM_MC_ARG(uint16_t, offSeg, 1);
5606 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
5607 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
5608 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 4);
5609 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
5610 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5611 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5612 IEM_MC_FETCH_MEM_U16(offSeg, pIemCpu->iEffSeg, GCPtrEff);
5613 IEM_MC_FETCH_MEM_U16_DISP(uSel, pIemCpu->iEffSeg, GCPtrEff, 2);
5614 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
5615 IEM_MC_END();
5616 return VINF_SUCCESS;
5617
5618 case IEMMODE_32BIT:
5619 /* 32-bit offset followed by the selector at +4. */
5620 IEM_MC_BEGIN(5, 1);
5621 IEM_MC_ARG(uint16_t, uSel, 0);
5622 IEM_MC_ARG(uint32_t, offSeg, 1);
5623 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
5624 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
5625 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 4);
5626 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
5627 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5628 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5629 IEM_MC_FETCH_MEM_U32(offSeg, pIemCpu->iEffSeg, GCPtrEff);
5630 IEM_MC_FETCH_MEM_U16_DISP(uSel, pIemCpu->iEffSeg, GCPtrEff, 4);
5631 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
5632 IEM_MC_END();
5633 return VINF_SUCCESS;
5634
5635 case IEMMODE_64BIT:
5636 /* 64-bit offset followed by the selector at +8; AMD CPUs differ (see below). */
5637 IEM_MC_BEGIN(5, 1);
5638 IEM_MC_ARG(uint16_t, uSel, 0);
5639 IEM_MC_ARG(uint64_t, offSeg, 1);
5640 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
5641 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
5642 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 4);
5643 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
5644 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5645 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5646 if (IEM_IS_GUEST_CPU_AMD(pIemCpu)) /** @todo testcase: rev 3.15 of the amd manuals claims it only loads a 32-bit greg. */
5647 IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pIemCpu->iEffSeg, GCPtrEff);
5648 else
5649 IEM_MC_FETCH_MEM_U64(offSeg, pIemCpu->iEffSeg, GCPtrEff);
5650 IEM_MC_FETCH_MEM_U16_DISP(uSel, pIemCpu->iEffSeg, GCPtrEff, 8);
5651 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
5652 IEM_MC_END();
5653 return VINF_SUCCESS;
5654
5655 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5656 }
5657}
5648
5649
5650/** Opcode 0x0f 0xb2. LSS Gv,Mp - load far pointer into SS and Gv. */
5651FNIEMOP_DEF(iemOp_lss_Gv_Mp)
5652{
5653 IEMOP_MNEMONIC("lss Gv,Mp");
5654 IEMOP_HLP_MIN_386(); /* LSS first appeared on the 386. */
5655 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5656 /* A register operand is invalid for far pointer loads. */
5657 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5658 return IEMOP_RAISE_INVALID_OPCODE();
5659 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
5660}
5660
5661
5662/** Opcode 0x0f 0xb3. BTR Ev,Gv - bit test and reset. */
5663FNIEMOP_DEF(iemOp_btr_Ev_Gv)
5664{
5665 IEMOP_MNEMONIC("btr Ev,Gv");
 IEMOP_HLP_MIN_386(); /* Like BTS/BTC, BTR first appeared on the 386. */
5666 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
5667}
5668
5669
5670/** Opcode 0x0f 0xb4. LFS Gv,Mp - load far pointer into FS and Gv. */
5671FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
5672{
5673 IEMOP_MNEMONIC("lfs Gv,Mp");
5674 IEMOP_HLP_MIN_386(); /* FS only exists on the 386 and later. */
5675 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5676 /* A register operand is invalid for far pointer loads. */
5677 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5678 return IEMOP_RAISE_INVALID_OPCODE();
5679 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
5680}
5680
5681
5682/** Opcode 0x0f 0xb5. LGS Gv,Mp - load far pointer into GS and Gv. */
5683FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
5684{
5685 IEMOP_MNEMONIC("lgs Gv,Mp");
5686 IEMOP_HLP_MIN_386(); /* GS only exists on the 386 and later. */
5687 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5688 /* A register operand is invalid for far pointer loads. */
5689 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5690 return IEMOP_RAISE_INVALID_OPCODE();
5691 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
5692}
5692
5693
5694/** Opcode 0x0f 0xb6. MOVZX Gv,Eb - zero extend a byte into a 16/32/64-bit
5695 * general register. */
5696FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
5697{
5698 IEMOP_MNEMONIC("movzx Gv,Eb");
5699 IEMOP_HLP_MIN_386(); /* MOVZX first appeared on the 386. */
5700
5701 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5702 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
5703
5704 /*
5705 * If rm is denoting a register, no more instruction bytes.
5706 */
5707 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5708 {
5709 switch (pIemCpu->enmEffOpSize)
5710 {
5711 case IEMMODE_16BIT:
5712 IEM_MC_BEGIN(0, 1);
5713 IEM_MC_LOCAL(uint16_t, u16Value);
5714 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
5715 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
5716 IEM_MC_ADVANCE_RIP();
5717 IEM_MC_END();
5718 return VINF_SUCCESS;
5719
5720 case IEMMODE_32BIT:
5721 IEM_MC_BEGIN(0, 1);
5722 IEM_MC_LOCAL(uint32_t, u32Value);
5723 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
5724 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
5725 IEM_MC_ADVANCE_RIP();
5726 IEM_MC_END();
5727 return VINF_SUCCESS;
5728
5729 case IEMMODE_64BIT:
5730 IEM_MC_BEGIN(0, 1);
5731 IEM_MC_LOCAL(uint64_t, u64Value);
5732 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
5733 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
5734 IEM_MC_ADVANCE_RIP();
5735 IEM_MC_END();
5736 return VINF_SUCCESS;
5737
5738 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5739 }
5740 }
5741 else
5742 {
5743 /*
5744 * We're loading a register from memory.
5745 */
5746 switch (pIemCpu->enmEffOpSize)
5747 {
5748 case IEMMODE_16BIT:
5749 IEM_MC_BEGIN(0, 2);
5750 IEM_MC_LOCAL(uint16_t, u16Value);
5751 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5752 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5753 IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
5754 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
5755 IEM_MC_ADVANCE_RIP();
5756 IEM_MC_END();
5757 return VINF_SUCCESS;
5758
5759 case IEMMODE_32BIT:
5760 IEM_MC_BEGIN(0, 2);
5761 IEM_MC_LOCAL(uint32_t, u32Value);
5762 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5763 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5764 IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
5765 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
5766 IEM_MC_ADVANCE_RIP();
5767 IEM_MC_END();
5768 return VINF_SUCCESS;
5769
5770 case IEMMODE_64BIT:
5771 IEM_MC_BEGIN(0, 2);
5772 IEM_MC_LOCAL(uint64_t, u64Value);
5773 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5774 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5775 IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
5776 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
5777 IEM_MC_ADVANCE_RIP();
5778 IEM_MC_END();
5779 return VINF_SUCCESS;
5780
5781 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5782 }
5783 }
5784}
5784
5785
5786/** Opcode 0x0f 0xb7. MOVZX Gv,Ew - zero extend a word into a 32/64-bit
5787 * general register (16-bit operand size is treated the same as 32-bit). */
5788FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
5789{
5790 IEMOP_MNEMONIC("movzx Gv,Ew");
5791 IEMOP_HLP_MIN_386(); /* MOVZX first appeared on the 386. */
5792
5793 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5794 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
5795
5796 /** @todo Not entirely sure how the operand size prefix is handled here,
5797 * assuming that it will be ignored. Would be nice to have a few
5798 * test for this. */
5799 /*
5800 * If rm is denoting a register, no more instruction bytes.
5801 */
5802 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5803 {
5804 if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
5805 {
5806 IEM_MC_BEGIN(0, 1);
5807 IEM_MC_LOCAL(uint32_t, u32Value);
5808 IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
5809 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
5810 IEM_MC_ADVANCE_RIP();
5811 IEM_MC_END();
5812 }
5813 else
5814 {
5815 IEM_MC_BEGIN(0, 1);
5816 IEM_MC_LOCAL(uint64_t, u64Value);
5817 IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
5818 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
5819 IEM_MC_ADVANCE_RIP();
5820 IEM_MC_END();
5821 }
5822 }
5823 else
5824 {
5825 /*
5826 * We're loading a register from memory.
5827 */
5828 if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
5829 {
5830 IEM_MC_BEGIN(0, 2);
5831 IEM_MC_LOCAL(uint32_t, u32Value);
5832 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5833 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5834 IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
5835 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
5836 IEM_MC_ADVANCE_RIP();
5837 IEM_MC_END();
5838 }
5839 else
5840 {
5841 IEM_MC_BEGIN(0, 2);
5842 IEM_MC_LOCAL(uint64_t, u64Value);
5843 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5844 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5845 IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
5846 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
5847 IEM_MC_ADVANCE_RIP();
5848 IEM_MC_END();
5849 }
5850 }
5851 return VINF_SUCCESS;
5852}
5852
5853
5854/** Opcode 0x0f 0xb8. POPCNT Gv,Ev (F3 prefix) / JMPE - not implemented yet. */
5855FNIEMOP_STUB(iemOp_popcnt_Gv_Ev_jmpe);
5856
5857
5858/** Opcode 0x0f 0xb9. Group 10 (UD1) - architecturally defined to raise \#UD. */
5859FNIEMOP_DEF(iemOp_Grp10)
5860{
5861 Log(("iemOp_Grp10 -> #UD\n"));
5862 return IEMOP_RAISE_INVALID_OPCODE();
5863}
5864
5865
5866/** Opcode 0x0f 0xba. Group 8: BT/BTS/BTR/BTC Ev,Ib - bit test with an
5867 * immediate bit index, selected by the reg field of the ModR/M byte. */
5868FNIEMOP_DEF(iemOp_Grp8)
5869{
5870 IEMOP_HLP_MIN_386(); /* Bit test instructions first appeared on the 386. */
5871 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5872 PCIEMOPBINSIZES pImpl;
5873 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
5874 {
5875 case 0: case 1: case 2: case 3:
5876 return IEMOP_RAISE_INVALID_OPCODE();
5877 case 4: pImpl = &g_iemAImpl_bt; IEMOP_MNEMONIC("bt Ev,Ib"); break;
5878 case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC("bts Ev,Ib"); break;
5879 case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC("btr Ev,Ib"); break;
5880 case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC("btc Ev,Ib"); break;
5881 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5882 }
5883 /* OF/SF/ZF/AF/PF are architecturally undefined after bit test instructions. */
5884 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
5885
5886 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5887 {
5888 /* register destination. */
5889 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
5890 IEMOP_HLP_NO_LOCK_PREFIX();
5891
5892 /* The bit index is masked to the operand width (& 0x0f/0x1f/0x3f below). */
5893 switch (pIemCpu->enmEffOpSize)
5894 {
5895 case IEMMODE_16BIT:
5896 IEM_MC_BEGIN(3, 0);
5897 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5898 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u8Bit & 0x0f, 1);
5899 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5900
5901 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
5902 IEM_MC_REF_EFLAGS(pEFlags);
5903 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
5904
5905 IEM_MC_ADVANCE_RIP();
5906 IEM_MC_END();
5907 return VINF_SUCCESS;
5908
5909 case IEMMODE_32BIT:
5910 IEM_MC_BEGIN(3, 0);
5911 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5912 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u8Bit & 0x1f, 1);
5913 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5914
5915 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
5916 IEM_MC_REF_EFLAGS(pEFlags);
5917 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
5918
5919 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5920 IEM_MC_ADVANCE_RIP();
5921 IEM_MC_END();
5922 return VINF_SUCCESS;
5923
5924 case IEMMODE_64BIT:
5925 IEM_MC_BEGIN(3, 0);
5926 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5927 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u8Bit & 0x3f, 1);
5928 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5929
5930 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
5931 IEM_MC_REF_EFLAGS(pEFlags);
5932 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
5933
5934 IEM_MC_ADVANCE_RIP();
5935 IEM_MC_END();
5936 return VINF_SUCCESS;
5937
5938 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5939 }
5940 }
5941 else
5942 {
5943 /* memory destination. */
5944
5945 /* BT only reads; the modifying forms (BTS/BTR/BTC) map read-write and allow LOCK. */
5946 uint32_t fAccess;
5947 if (pImpl->pfnLockedU16)
5948 fAccess = IEM_ACCESS_DATA_RW;
5949 else /* BT */
5950 {
5951 IEMOP_HLP_NO_LOCK_PREFIX();
5952 fAccess = IEM_ACCESS_DATA_R;
5953 }
5954
5955 /** @todo test negative bit offsets! */
5956 switch (pIemCpu->enmEffOpSize)
5957 {
5958 case IEMMODE_16BIT:
5959 IEM_MC_BEGIN(3, 1);
5960 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5961 IEM_MC_ARG(uint16_t, u16Src, 1);
5962 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
5963 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5964
5965 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5966 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
5967 IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
5968 IEM_MC_FETCH_EFLAGS(EFlags);
5969 IEM_MC_MEM_MAP(pu16Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0);
5970 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
5971 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
5972 else
5973 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
5974 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
5975
5976 IEM_MC_COMMIT_EFLAGS(EFlags);
5977 IEM_MC_ADVANCE_RIP();
5978 IEM_MC_END();
5979 return VINF_SUCCESS;
5980
5981 case IEMMODE_32BIT:
5982 IEM_MC_BEGIN(3, 1);
5983 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5984 IEM_MC_ARG(uint32_t, u32Src, 1);
5985 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
5986 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5987
5988 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5989 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
5990 IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
5991 IEM_MC_FETCH_EFLAGS(EFlags);
5992 IEM_MC_MEM_MAP(pu32Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0);
5993 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
5994 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
5995 else
5996 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
5997 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
5998
5999 IEM_MC_COMMIT_EFLAGS(EFlags);
6000 IEM_MC_ADVANCE_RIP();
6001 IEM_MC_END();
6002 return VINF_SUCCESS;
6003
6004 case IEMMODE_64BIT:
6005 IEM_MC_BEGIN(3, 1);
6006 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6007 IEM_MC_ARG(uint64_t, u64Src, 1);
6008 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6009 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6010
6011 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6012 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6013 IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
6014 IEM_MC_FETCH_EFLAGS(EFlags);
6015 IEM_MC_MEM_MAP(pu64Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0);
6016 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
6017 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6018 else
6019 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
6020 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
6021
6022 IEM_MC_COMMIT_EFLAGS(EFlags);
6023 IEM_MC_ADVANCE_RIP();
6024 IEM_MC_END();
6025 return VINF_SUCCESS;
6026
6027 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6028 }
6029 }
6030
6031}
6028
6029
/** Opcode 0x0f 0xbb - btc Ev,Gv (bit test and complement). */
FNIEMOP_DEF(iemOp_btc_Ev_Gv)
{
    IEMOP_MNEMONIC("btc Ev,Gv");
    IEMOP_HLP_MIN_386();    /* 0x0f 0xbb requires a 386 or later. */
    /* Decoding is shared with bt/bts/btr; only the worker table differs. */
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
}
6037
6038
/** Opcode 0x0f 0xbc - bsf Gv,Ev (bit scan forward). */
FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
{
    IEMOP_MNEMONIC("bsf Gv,Ev");
    IEMOP_HLP_MIN_386();    /* 0x0f 0xbc requires a 386 or later. */
    /* Only ZF is architecturally defined for bsf; the rest are marked
       undefined for the verifier so host/guest differences don't trip it. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsf);
}
6047
6048
/** Opcode 0x0f 0xbd - bsr Gv,Ev (bit scan reverse). */
FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
{
    IEMOP_MNEMONIC("bsr Gv,Ev");
    IEMOP_HLP_MIN_386();    /* 0x0f 0xbd requires a 386 or later. */
    /* Same undefined-flags treatment as bsf above. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsr);
}
6057
6058
/**
 * Opcode 0x0f 0xbe - movsx Gv,Eb.
 *
 * Sign-extends a byte source (register or memory) into a 16/32/64-bit
 * destination register, selected by the effective operand size.
 */
FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
{
    IEMOP_MNEMONIC("movsx Gv,Eb");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register source: fetch with sign-extension, store to the reg field. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6149
6150
/**
 * Opcode 0x0f 0xbf - movsx Gv,Ew.
 *
 * Sign-extends a word source into a 32 or 64-bit destination register.
 * There is no 16-bit destination variant here; non-64-bit operand sizes
 * collapse to the 32-bit form.
 */
FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
{
    IEMOP_MNEMONIC("movsx Gv,Ew");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /** @todo Not entirely sure how the operand size prefix is handled here,
     *        assuming that it will be ignored. Would be nice to have a few
     *        test for this. */
    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}
6217
6218
/**
 * Opcode 0x0f 0xc0 - xadd Eb,Gb.
 *
 * Exchange-and-add on a byte destination.  The memory form supports the
 * LOCK prefix via the *_locked worker; the register form rejects LOCK.
 */
FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_MIN_486();    /* xadd first appeared on the 486. */
    IEMOP_MNEMONIC("xadd Eb,Gb");

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0);
        IEM_MC_ARG(uint8_t *,  pu8Reg,  1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
        IEM_MC_BEGIN(3, 3);
        IEM_MC_ARG(uint8_t *,  pu8Dst,          0);
        IEM_MC_ARG(uint8_t *,  pu8Reg,          1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(uint8_t,  u8RegCopy);
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        /* Work on a copy of the register so the worker's old-value writeback
           can be committed to the real register only after the memory commit. */
        IEM_MC_FETCH_GREG_U8(u8RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
        IEM_MC_FETCH_EFLAGS(EFlags);
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u8RegCopy);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }
    return VINF_SUCCESS;
}
6277
6278
/**
 * Opcode 0x0f 0xc1 - xadd Ev,Gv.
 *
 * Exchange-and-add for 16/32/64-bit operands, selected by the effective
 * operand size.  Memory forms honour the LOCK prefix via the *_locked
 * workers; register forms reject LOCK.
 */
FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
{
    IEMOP_MNEMONIC("xadd Ev,Gv");
    IEMOP_HLP_MIN_486();    /* xadd first appeared on the 486. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX();

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst,  0);
                IEM_MC_ARG(uint16_t *, pu16Reg,  1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst,  0);
                IEM_MC_ARG(uint32_t *, pu32Reg,  1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);

                /* 32-bit writes zero the upper halves of both 64-bit registers. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst,  0);
                IEM_MC_ARG(uint64_t *, pu64Reg,  1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory.  As in the byte variant, the source register
         * is copied to a local first so it is only written back after the
         * memory operand has been committed.
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint16_t *, pu16Dst,          0);
                IEM_MC_ARG(uint16_t *, pu16Reg,          1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16RegCopy);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U16(u16RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint32_t *, pu32Dst,          0);
                IEM_MC_ARG(uint32_t *, pu32Reg,          1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32RegCopy);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U32(u32RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint64_t *, pu64Dst,          0);
                IEM_MC_ARG(uint64_t *, pu64Reg,          1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64RegCopy);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U64(u64RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6431
/** Opcode 0x0f 0xc2 - cmpps/cmppd/cmpss/cmpsd (decoder stub, not implemented yet). */
FNIEMOP_STUB(iemOp_cmpps_Vps_Wps_Ib__cmppd_Vpd_Wpd_Ib__cmpss_Vss_Wss_Ib__cmpsd_Vsd_Wsd_Ib);

/** Opcode 0x0f 0xc3 - movnti (decoder stub, not implemented yet). */
FNIEMOP_STUB(iemOp_movnti_My_Gy);

/** Opcode 0x0f 0xc4 - pinsrw (decoder stub, not implemented yet). */
FNIEMOP_STUB(iemOp_pinsrw_Pq_Ry_Mw_Ib__pinsrw_Vdq_Ry_Mw_Ib);

/** Opcode 0x0f 0xc5 - pextrw (decoder stub, not implemented yet). */
FNIEMOP_STUB(iemOp_pextrw_Gd_Nq_Ib__pextrw_Gd_Udq_Ib);

/** Opcode 0x0f 0xc6 - shufps/shufpd (decoder stub, not implemented yet). */
FNIEMOP_STUB(iemOp_shufps_Vps_Wps_Ib__shufdp_Vpd_Wpd_Ib);
6446
6447
/**
 * Opcode 0x0f 0xc7 !11/1 - cmpxchg8b Mq.
 *
 * Compares EDX:EAX with the 64-bit memory operand; on match the memory is
 * replaced with ECX:EBX, otherwise EDX:EAX receive the memory value.  The
 * ZF outcome is produced by the worker; the EDX:EAX writeback below only
 * happens on the ZF-clear (mismatch) path.
 */
FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
{
    IEMOP_MNEMONIC("cmpxchg8b Mq");

    IEM_MC_BEGIN(4, 3);
    IEM_MC_ARG(uint64_t *, pu64MemDst,     0);
    IEM_MC_ARG(PRTUINT64U, pu64EaxEdx,     1);
    IEM_MC_ARG(PRTUINT64U, pu64EbxEcx,     2);
    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
    IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
    IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);

    /* Assemble the EDX:EAX comparand from the 32-bit register halves. */
    IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
    IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
    IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);

    /* Assemble the ECX:EBX replacement value likewise. */
    IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
    IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
    IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);

    IEM_MC_FETCH_EFLAGS(EFlags);
    if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
    else
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);

    IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
    IEM_MC_COMMIT_EFLAGS(EFlags);
    IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
        /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */
        IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
        IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
6492
6493
/** Opcode REX.W 0x0f 0xc7 !11/1 - cmpxchg16b (UD stub, decodes to \#UD for now). */
FNIEMOP_UD_STUB_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm);

/** Opcode 0x0f 0xc7 11/6 - rdrand (UD stub, decodes to \#UD for now). */
FNIEMOP_UD_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm);

/** Opcode 0x0f 0xc7 !11/6 - vmptrld (UD stub, decodes to \#UD for now). */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);

/** Opcode 0x66 0x0f 0xc7 !11/6 - vmclear (UD stub, decodes to \#UD for now). */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xc7 !11/6 - vmxon (UD stub, decodes to \#UD for now). */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);

/** Opcode [0xf3] 0x0f 0xc7 !11/7 - vmptrst (UD stub, decodes to \#UD for now). */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
6511
6512
6513/** Opcode 0x0f 0xc7. */
6514FNIEMOP_DEF(iemOp_Grp9)
6515{
6516 /** @todo Testcase: Check mixing 0x66 and 0xf3. Check the effect of 0xf2. */
6517 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6518 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
6519 {
6520 case 0: case 2: case 3: case 4: case 5:
6521 return IEMOP_RAISE_INVALID_OPCODE();
6522 case 1:
6523 /** @todo Testcase: Check prefix effects on cmpxchg8b/16b. */
6524 if ( (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)
6525 || (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))) /** @todo Testcase: AMD seems to express a different idea here wrt prefixes. */
6526 return IEMOP_RAISE_INVALID_OPCODE();
6527 if (bRm & IEM_OP_PRF_SIZE_REX_W)
6528 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
6529 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
6530 case 6:
6531 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6532 return FNIEMOP_CALL_1(iemOp_Grp9_rdrand_Rv, bRm);
6533 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))
6534 {
6535 case 0:
6536 return FNIEMOP_CALL_1(iemOp_Grp9_vmptrld_Mq, bRm);
6537 case IEM_OP_PRF_SIZE_OP:
6538 return FNIEMOP_CALL_1(iemOp_Grp9_vmclear_Mq, bRm);
6539 case IEM_OP_PRF_REPZ:
6540 return FNIEMOP_CALL_1(iemOp_Grp9_vmxon_Mq, bRm);
6541 default:
6542 return IEMOP_RAISE_INVALID_OPCODE();
6543 }
6544 case 7:
6545 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))
6546 {
6547 case 0:
6548 case IEM_OP_PRF_REPZ:
6549 return FNIEMOP_CALL_1(iemOp_Grp9_vmptrst_Mq, bRm);
6550 default:
6551 return IEMOP_RAISE_INVALID_OPCODE();
6552 }
6553 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6554 }
6555}
6556
6557
/**
 * Common 'bswap register' helper.
 *
 * Byte-swaps the general register @a iReg at the current effective operand
 * size.  The 16-bit form goes through a dedicated worker operating on the
 * 32-bit register reference; the 32-bit form additionally clears the high
 * dword of the 64-bit register, the 16-bit form deliberately does not.
 */
FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(1, 0);
            IEM_MC_ARG(uint32_t *,  pu32Dst, 0);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);     /* Don't clear the high dword! */
            IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(1, 0);
            IEM_MC_ARG(uint32_t *,  pu32Dst, 0);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
            IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(1, 0);
            IEM_MC_ARG(uint64_t *,  pu64Dst, 0);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6597
6598
/** Opcode 0x0f 0xc8 - bswap rAX/r8. */
FNIEMOP_DEF(iemOp_bswap_rAX_r8)
{
    IEMOP_MNEMONIC("bswap rAX/r8");
    /* Note! Intel manuals states that R8-R15 can be accessed by using a REX.X
             prefix.  REX.B is the correct prefix it appears.  For a parallel
             case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
    IEMOP_HLP_MIN_486();    /* bswap first appeared on the 486. */
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pIemCpu->uRexB);
}
6609
6610
/** Opcode 0x0f 0xc9 - bswap rCX/r9. */
FNIEMOP_DEF(iemOp_bswap_rCX_r9)
{
    IEMOP_MNEMONIC("bswap rCX/r9");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pIemCpu->uRexB);
}
6618
6619
6620/** Opcode 0x0f 0xca. */
6621FNIEMOP_DEF(iemOp_bswap_rDX_r10)
6622{
6623 IEMOP_MNEMONIC("bswap rDX/r9");
6624 IEMOP_HLP_MIN_486();
6625 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pIemCpu->uRexB);
6626}
6627
6628
6629/** Opcode 0x0f 0xcb. */
6630FNIEMOP_DEF(iemOp_bswap_rBX_r11)
6631{
6632 IEMOP_MNEMONIC("bswap rBX/r9");
6633 IEMOP_HLP_MIN_486();
6634 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pIemCpu->uRexB);
6635}
6636
6637
/** Opcode 0x0f 0xcc - bswap rSP/r12. */
FNIEMOP_DEF(iemOp_bswap_rSP_r12)
{
    IEMOP_MNEMONIC("bswap rSP/r12");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pIemCpu->uRexB);
}
6645
6646
/** Opcode 0x0f 0xcd - bswap rBP/r13. */
FNIEMOP_DEF(iemOp_bswap_rBP_r13)
{
    IEMOP_MNEMONIC("bswap rBP/r13");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pIemCpu->uRexB);
}
6654
6655
/** Opcode 0x0f 0xce - bswap rSI/r14. */
FNIEMOP_DEF(iemOp_bswap_rSI_r14)
{
    IEMOP_MNEMONIC("bswap rSI/r14");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pIemCpu->uRexB);
}
6663
6664
/** Opcode 0x0f 0xcf - bswap rDI/r15. */
FNIEMOP_DEF(iemOp_bswap_rDI_r15)
{
    IEMOP_MNEMONIC("bswap rDI/r15");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pIemCpu->uRexB);
}
6672
6673
6674
/** Opcode 0x0f 0xd0 - addsubpd/addsubps (decoder stub, not implemented yet). */
FNIEMOP_STUB(iemOp_addsubpd_Vpd_Wpd__addsubps_Vps_Wps);
/** Opcode 0x0f 0xd1 - psrlw (decoder stub, not implemented yet). */
FNIEMOP_STUB(iemOp_psrlw_Pp_Qp__psrlw_Vdp_Wdq);
/** Opcode 0x0f 0xd2 - psrld (decoder stub, not implemented yet). */
FNIEMOP_STUB(iemOp_psrld_Pq_Qq__psrld_Vdq_Wdq);
/** Opcode 0x0f 0xd3 - psrlq (decoder stub, not implemented yet). */
FNIEMOP_STUB(iemOp_psrlq_Pq_Qq__psrlq_Vdq_Wdq);
/** Opcode 0x0f 0xd4 - paddq (decoder stub, not implemented yet). */
FNIEMOP_STUB(iemOp_paddq_Pq_Qq__paddq_Vdq_Wdq);
/** Opcode 0x0f 0xd5 - pmullw (decoder stub, not implemented yet). */
FNIEMOP_STUB(iemOp_pmulq_Pq_Qq__pmullw_Vdq_Wdq);
/** Opcode 0x0f 0xd6 - movq/movq2dq/movdq2q (decoder stub, not implemented yet). */
FNIEMOP_STUB(iemOp_movq_Wq_Vq__movq2dq_Vdq_Nq__movdq2q_Pq_Uq);
6689
6690
6691/** Opcode 0x0f 0xd7. */
6692FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq__pmovmskb_Gd_Udq)
6693{
6694 /* Docs says register only. */
6695 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6696 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
6697 return IEMOP_RAISE_INVALID_OPCODE();
6698
6699 /* Note! Taking the lazy approch here wrt the high 32-bits of the GREG. */
6700 /** @todo testcase: Check that the instruction implicitly clears the high
6701 * bits in 64-bit mode. The REX.W is first necessary when VLMAX > 256
6702 * and opcode modifications are made to work with the whole width (not
6703 * just 128). */
6704 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
6705 {
6706 case IEM_OP_PRF_SIZE_OP: /* SSE */
6707 IEMOP_MNEMONIC("pmovmskb Gd,Nq");
6708 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
6709 IEM_MC_BEGIN(2, 0);
6710 IEM_MC_ARG(uint64_t *, pDst, 0);
6711 IEM_MC_ARG(uint128_t const *, pSrc, 1);
6712 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6713 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
6714 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
6715 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
6716 IEM_MC_ADVANCE_RIP();
6717 IEM_MC_END();
6718 return VINF_SUCCESS;
6719
6720 case 0: /* MMX */
6721 IEMOP_MNEMONIC("pmovmskb Gd,Udq");
6722 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
6723 IEM_MC_BEGIN(2, 0);
6724 IEM_MC_ARG(uint64_t *, pDst, 0);
6725 IEM_MC_ARG(uint64_t const *, pSrc, 1);
6726 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
6727 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
6728 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
6729 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
6730 IEM_MC_ADVANCE_RIP();
6731 IEM_MC_END();
6732 return VINF_SUCCESS;
6733
6734 default:
6735 return IEMOP_RAISE_INVALID_OPCODE();
6736 }
6737}
6738
6739
/** Opcode 0x0f 0xd8 - psubusb (decoder stub, not implemented yet). */
FNIEMOP_STUB(iemOp_psubusb_Pq_Qq__psubusb_Vdq_Wdq);
/** Opcode 0x0f 0xd9 - psubusw (decoder stub, not implemented yet). */
FNIEMOP_STUB(iemOp_psubusw_Pq_Qq__psubusw_Vdq_Wdq);
/** Opcode 0x0f 0xda - pminub (decoder stub, not implemented yet). */
FNIEMOP_STUB(iemOp_pminub_Pq_Qq__pminub_Vdq_Wdq);
/** Opcode 0x0f 0xdb - pand (decoder stub, not implemented yet). */
FNIEMOP_STUB(iemOp_pand_Pq_Qq__pand_Vdq_Wdq);
/** Opcode 0x0f 0xdc - paddusb (decoder stub, not implemented yet). */
FNIEMOP_STUB(iemOp_paddusb_Pq_Qq__paddusb_Vdq_Wdq);
/** Opcode 0x0f 0xdd - paddusw (decoder stub, not implemented yet). */
FNIEMOP_STUB(iemOp_paddusw_Pq_Qq__paddusw_Vdq_Wdq);
/** Opcode 0x0f 0xde - pmaxub (decoder stub, not implemented yet). */
FNIEMOP_STUB(iemOp_pmaxub_Pq_Qq__pamxub_Vdq_Wdq);
/** Opcode 0x0f 0xdf - pandn (decoder stub, not implemented yet). */
FNIEMOP_STUB(iemOp_pandn_Pq_Qq__pandn_Vdq_Wdq);
/** Opcode 0x0f 0xe0 - pavgb (decoder stub, not implemented yet). */
FNIEMOP_STUB(iemOp_pavgb_Pq_Qq__pavgb_Vdq_Wdq);
/** Opcode 0x0f 0xe1 - psraw (decoder stub, not implemented yet). */
FNIEMOP_STUB(iemOp_psraw_Pq_Qq__psraw_Vdq_Wdq);
/** Opcode 0x0f 0xe2 - psrad (decoder stub, not implemented yet). */
FNIEMOP_STUB(iemOp_psrad_Pq_Qq__psrad_Vdq_Wdq);
/** Opcode 0x0f 0xe3 - pavgw (decoder stub, not implemented yet). */
FNIEMOP_STUB(iemOp_pavgw_Pq_Qq__pavgw_Vdq_Wdq);
/** Opcode 0x0f 0xe4 - pmulhuw (decoder stub, not implemented yet). */
FNIEMOP_STUB(iemOp_pmulhuw_Pq_Qq__pmulhuw_Vdq_Wdq);
/** Opcode 0x0f 0xe5 - pmulhw (decoder stub, not implemented yet). */
FNIEMOP_STUB(iemOp_pmulhw_Pq_Qq__pmulhw_Vdq_Wdq);
/** Opcode 0x0f 0xe6 - cvttpd2dq/cvtdq2pd/cvtpd2dq (decoder stub, not implemented yet). */
FNIEMOP_STUB(iemOp_cvttpd2dq_Vdq_Wdp__cvtdq2pd_Vdq_Wpd__cvtpd2dq_Vdq_Wpd);
/** Opcode 0x0f 0xe7 - movntq/movntdq (decoder stub, not implemented yet). */
FNIEMOP_STUB(iemOp_movntq_Mq_Pq__movntdq_Mdq_Vdq);
/** Opcode 0x0f 0xe8 - psubsb (decoder stub, not implemented yet). */
FNIEMOP_STUB(iemOp_psubsb_Pq_Qq__psubsb_Vdq_Wdq);
/** Opcode 0x0f 0xe9 - psubsw (decoder stub, not implemented yet). */
FNIEMOP_STUB(iemOp_psubsw_Pq_Qq__psubsw_Vdq_Wdq);
/** Opcode 0x0f 0xea - pminsw (decoder stub, not implemented yet). */
FNIEMOP_STUB(iemOp_pminsw_Pq_Qq__pminsw_Vdq_Wdq);
/** Opcode 0x0f 0xeb - por (decoder stub, not implemented yet). */
FNIEMOP_STUB(iemOp_por_Pq_Qq__por_Vdq_Wdq);
/** Opcode 0x0f 0xec - paddsb (decoder stub, not implemented yet). */
FNIEMOP_STUB(iemOp_paddsb_Pq_Qq__paddsb_Vdq_Wdq);
/** Opcode 0x0f 0xed - paddsw (decoder stub, not implemented yet). */
FNIEMOP_STUB(iemOp_paddsw_Pq_Qq__paddsw_Vdq_Wdq);
/** Opcode 0x0f 0xee - pmaxsw (decoder stub, not implemented yet). */
FNIEMOP_STUB(iemOp_pmaxsw_Pq_Qq__pmaxsw_Vdq_Wdq);
6786
6787
/** Opcode 0x0f 0xef - pxor (MMX/SSE2, shared full-width decoder). */
FNIEMOP_DEF(iemOp_pxor_Pq_Qq__pxor_Vdq_Wdq)
{
    IEMOP_MNEMONIC("pxor");
    /* Uses the common MMX/SSE2 full-to-full decoder with the pxor worker table. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pxor);
}
6794
6795
/** Opcode 0x0f 0xf0 - lddqu (decoder stub, not implemented yet). */
FNIEMOP_STUB(iemOp_lddqu_Vdq_Mdq);
/** Opcode 0x0f 0xf1 - psllw (decoder stub, not implemented yet). */
FNIEMOP_STUB(iemOp_psllw_Pq_Qq__pslw_Vdq_Wdq);
/** Opcode 0x0f 0xf2 - pslld (decoder stub, not implemented yet). */
FNIEMOP_STUB(iemOp_psld_Pq_Qq__pslld_Vdq_Wdq);
/** Opcode 0x0f 0xf3 - psllq (decoder stub, not implemented yet). */
FNIEMOP_STUB(iemOp_psllq_Pq_Qq__pslq_Vdq_Wdq);
/** Opcode 0x0f 0xf4 - pmuludq (decoder stub, not implemented yet). */
FNIEMOP_STUB(iemOp_pmuludq_Pq_Qq__pmuludq_Vdq_Wdq);
/** Opcode 0x0f 0xf5 - pmaddwd (decoder stub, not implemented yet). */
FNIEMOP_STUB(iemOp_pmaddwd_Pq_Qq__pmaddwd_Vdq_Wdq);
/** Opcode 0x0f 0xf6 - psadbw (decoder stub, not implemented yet). */
FNIEMOP_STUB(iemOp_psadbw_Pq_Qq__psadbw_Vdq_Wdq);
/** Opcode 0x0f 0xf7 - maskmovq/maskmovdqu (decoder stub, not implemented yet). */
FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq__maskmovdqu_Vdq_Udq);
/** Opcode 0x0f 0xf8 - psubb (decoder stub, not implemented yet). */
FNIEMOP_STUB(iemOp_psubb_Pq_Qq_psubb_Vdq_Wdq); //NEXT
/** Opcode 0x0f 0xf9 - psubw (decoder stub, not implemented yet). */
FNIEMOP_STUB(iemOp_psubw_Pq_Qq__psubw_Vdq_Wdq);
/** Opcode 0x0f 0xfa - psubd (decoder stub, not implemented yet). */
FNIEMOP_STUB(iemOp_psubd_Pq_Qq__psubd_Vdq_Wdq);
/** Opcode 0x0f 0xfb - psubq (decoder stub, not implemented yet). */
FNIEMOP_STUB(iemOp_psubq_Pq_Qq__psbuq_Vdq_Wdq);
/** Opcode 0x0f 0xfc - paddb (decoder stub, not implemented yet). */
FNIEMOP_STUB(iemOp_paddb_Pq_Qq__paddb_Vdq_Wdq);
/** Opcode 0x0f 0xfd - paddw (decoder stub, not implemented yet). */
FNIEMOP_STUB(iemOp_paddw_Pq_Qq__paddw_Vdq_Wdq);
/** Opcode 0x0f 0xfe - paddd (decoder stub, not implemented yet). */
FNIEMOP_STUB(iemOp_paddd_Pq_Qq__paddd_Vdq_Wdq);
6826
6827
6828const PFNIEMOP g_apfnTwoByteMap[256] =
6829{
6830 /* 0x00 */ iemOp_Grp6,
6831 /* 0x01 */ iemOp_Grp7,
6832 /* 0x02 */ iemOp_lar_Gv_Ew,
6833 /* 0x03 */ iemOp_lsl_Gv_Ew,
6834 /* 0x04 */ iemOp_Invalid,
6835 /* 0x05 */ iemOp_syscall,
6836 /* 0x06 */ iemOp_clts,
6837 /* 0x07 */ iemOp_sysret,
6838 /* 0x08 */ iemOp_invd,
6839 /* 0x09 */ iemOp_wbinvd,
6840 /* 0x0a */ iemOp_Invalid,
6841 /* 0x0b */ iemOp_ud2,
6842 /* 0x0c */ iemOp_Invalid,
6843 /* 0x0d */ iemOp_nop_Ev_GrpP,
6844 /* 0x0e */ iemOp_femms,
6845 /* 0x0f */ iemOp_3Dnow,
6846 /* 0x10 */ iemOp_movups_Vps_Wps__movupd_Vpd_Wpd__movss_Vss_Wss__movsd_Vsd_Wsd,
6847 /* 0x11 */ iemOp_movups_Wps_Vps__movupd_Wpd_Vpd__movss_Wss_Vss__movsd_Vsd_Wsd,
6848 /* 0x12 */ iemOp_movlps_Vq_Mq__movhlps_Vq_Uq__movlpd_Vq_Mq__movsldup_Vq_Wq__movddup_Vq_Wq,
6849 /* 0x13 */ iemOp_movlps_Mq_Vq__movlpd_Mq_Vq,
6850 /* 0x14 */ iemOp_unpckhlps_Vps_Wq__unpcklpd_Vpd_Wq,
6851 /* 0x15 */ iemOp_unpckhps_Vps_Wq__unpckhpd_Vpd_Wq,
6852 /* 0x16 */ iemOp_movhps_Vq_Mq__movlhps_Vq_Uq__movhpd_Vq_Mq__movshdup_Vq_Wq,
6853 /* 0x17 */ iemOp_movhps_Mq_Vq__movhpd_Mq_Vq,
6854 /* 0x18 */ iemOp_prefetch_Grp16,
6855 /* 0x19 */ iemOp_nop_Ev,
6856 /* 0x1a */ iemOp_nop_Ev,
6857 /* 0x1b */ iemOp_nop_Ev,
6858 /* 0x1c */ iemOp_nop_Ev,
6859 /* 0x1d */ iemOp_nop_Ev,
6860 /* 0x1e */ iemOp_nop_Ev,
6861 /* 0x1f */ iemOp_nop_Ev,
6862 /* 0x20 */ iemOp_mov_Rd_Cd,
6863 /* 0x21 */ iemOp_mov_Rd_Dd,
6864 /* 0x22 */ iemOp_mov_Cd_Rd,
6865 /* 0x23 */ iemOp_mov_Dd_Rd,
6866 /* 0x24 */ iemOp_mov_Rd_Td,
6867 /* 0x25 */ iemOp_Invalid,
6868 /* 0x26 */ iemOp_mov_Td_Rd,
6869 /* 0x27 */ iemOp_Invalid,
6870 /* 0x28 */ iemOp_movaps_Vps_Wps__movapd_Vpd_Wpd,
6871 /* 0x29 */ iemOp_movaps_Wps_Vps__movapd_Wpd_Vpd,
6872 /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi__cvtpi2pd_Vpd_Qpi__cvtsi2ss_Vss_Ey__cvtsi2sd_Vsd_Ey,
6873 /* 0x2b */ iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd,
6874 /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps__cvttpd2pi_Ppi_Wpd__cvttss2si_Gy_Wss__cvttsd2si_Yu_Wsd,
6875 /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps__cvtpd2pi_QpiWpd__cvtss2si_Gy_Wss__cvtsd2si_Gy_Wsd,
6876 /* 0x2e */ iemOp_ucomiss_Vss_Wss__ucomisd_Vsd_Wsd,
6877 /* 0x2f */ iemOp_comiss_Vss_Wss__comisd_Vsd_Wsd,
6878 /* 0x30 */ iemOp_wrmsr,
6879 /* 0x31 */ iemOp_rdtsc,
6880 /* 0x32 */ iemOp_rdmsr,
6881 /* 0x33 */ iemOp_rdpmc,
6882 /* 0x34 */ iemOp_sysenter,
6883 /* 0x35 */ iemOp_sysexit,
6884 /* 0x36 */ iemOp_Invalid,
6885 /* 0x37 */ iemOp_getsec,
6886 /* 0x38 */ iemOp_3byte_Esc_A4,
6887 /* 0x39 */ iemOp_Invalid,
6888 /* 0x3a */ iemOp_3byte_Esc_A5,
6889 /* 0x3b */ iemOp_Invalid,
6890 /* 0x3c */ iemOp_movnti_Gv_Ev/*??*/,
6891 /* 0x3d */ iemOp_Invalid,
6892 /* 0x3e */ iemOp_Invalid,
6893 /* 0x3f */ iemOp_Invalid,
6894 /* 0x40 */ iemOp_cmovo_Gv_Ev,
6895 /* 0x41 */ iemOp_cmovno_Gv_Ev,
6896 /* 0x42 */ iemOp_cmovc_Gv_Ev,
6897 /* 0x43 */ iemOp_cmovnc_Gv_Ev,
6898 /* 0x44 */ iemOp_cmove_Gv_Ev,
6899 /* 0x45 */ iemOp_cmovne_Gv_Ev,
6900 /* 0x46 */ iemOp_cmovbe_Gv_Ev,
6901 /* 0x47 */ iemOp_cmovnbe_Gv_Ev,
6902 /* 0x48 */ iemOp_cmovs_Gv_Ev,
6903 /* 0x49 */ iemOp_cmovns_Gv_Ev,
6904 /* 0x4a */ iemOp_cmovp_Gv_Ev,
6905 /* 0x4b */ iemOp_cmovnp_Gv_Ev,
6906 /* 0x4c */ iemOp_cmovl_Gv_Ev,
6907 /* 0x4d */ iemOp_cmovnl_Gv_Ev,
6908 /* 0x4e */ iemOp_cmovle_Gv_Ev,
6909 /* 0x4f */ iemOp_cmovnle_Gv_Ev,
6910 /* 0x50 */ iemOp_movmskps_Gy_Ups__movmskpd_Gy_Upd,
6911 /* 0x51 */ iemOp_sqrtps_Wps_Vps__sqrtpd_Wpd_Vpd__sqrtss_Vss_Wss__sqrtsd_Vsd_Wsd,
6912 /* 0x52 */ iemOp_rsqrtps_Wps_Vps__rsqrtss_Vss_Wss,
6913 /* 0x53 */ iemOp_rcpps_Wps_Vps__rcpss_Vs_Wss,
6914 /* 0x54 */ iemOp_andps_Vps_Wps__andpd_Wpd_Vpd,
6915 /* 0x55 */ iemOp_andnps_Vps_Wps__andnpd_Wpd_Vpd,
6916 /* 0x56 */ iemOp_orps_Wpd_Vpd__orpd_Wpd_Vpd,
6917 /* 0x57 */ iemOp_xorps_Vps_Wps__xorpd_Wpd_Vpd,
6918 /* 0x58 */ iemOp_addps_Vps_Wps__addpd_Vpd_Wpd__addss_Vss_Wss__addsd_Vsd_Wsd,
6919 /* 0x59 */ iemOp_mulps_Vps_Wps__mulpd_Vpd_Wpd__mulss_Vss__Wss__mulsd_Vsd_Wsd,
6920 /* 0x5a */ iemOp_cvtps2pd_Vpd_Wps__cvtpd2ps_Vps_Wpd__cvtss2sd_Vsd_Wss__cvtsd2ss_Vss_Wsd,
6921 /* 0x5b */ iemOp_cvtdq2ps_Vps_Wdq__cvtps2dq_Vdq_Wps__cvtps2dq_Vdq_Wps,
6922 /* 0x5c */ iemOp_subps_Vps_Wps__subpd_Vps_Wdp__subss_Vss_Wss__subsd_Vsd_Wsd,
6923 /* 0x5d */ iemOp_minps_Vps_Wps__minpd_Vpd_Wpd__minss_Vss_Wss__minsd_Vsd_Wsd,
6924 /* 0x5e */ iemOp_divps_Vps_Wps__divpd_Vpd_Wpd__divss_Vss_Wss__divsd_Vsd_Wsd,
6925 /* 0x5f */ iemOp_maxps_Vps_Wps__maxpd_Vpd_Wpd__maxss_Vss_Wss__maxsd_Vsd_Wsd,
6926 /* 0x60 */ iemOp_punpcklbw_Pq_Qd__punpcklbw_Vdq_Wdq,
6927 /* 0x61 */ iemOp_punpcklwd_Pq_Qd__punpcklwd_Vdq_Wdq,
6928 /* 0x62 */ iemOp_punpckldq_Pq_Qd__punpckldq_Vdq_Wdq,
6929 /* 0x63 */ iemOp_packsswb_Pq_Qq__packsswb_Vdq_Wdq,
6930 /* 0x64 */ iemOp_pcmpgtb_Pq_Qq__pcmpgtb_Vdq_Wdq,
6931 /* 0x65 */ iemOp_pcmpgtw_Pq_Qq__pcmpgtw_Vdq_Wdq,
6932 /* 0x66 */ iemOp_pcmpgtd_Pq_Qq__pcmpgtd_Vdq_Wdq,
6933 /* 0x67 */ iemOp_packuswb_Pq_Qq__packuswb_Vdq_Wdq,
6934 /* 0x68 */ iemOp_punpckhbw_Pq_Qq__punpckhbw_Vdq_Wdq,
6935 /* 0x69 */ iemOp_punpckhwd_Pq_Qd__punpckhwd_Vdq_Wdq,
6936 /* 0x6a */ iemOp_punpckhdq_Pq_Qd__punpckhdq_Vdq_Wdq,
6937 /* 0x6b */ iemOp_packssdw_Pq_Qd__packssdq_Vdq_Wdq,
6938 /* 0x6c */ iemOp_punpcklqdq_Vdq_Wdq,
6939 /* 0x6d */ iemOp_punpckhqdq_Vdq_Wdq,
6940 /* 0x6e */ iemOp_movd_q_Pd_Ey__movd_q_Vy_Ey,
6941 /* 0x6f */ iemOp_movq_Pq_Qq__movdqa_Vdq_Wdq__movdqu_Vdq_Wdq,
6942 /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib__pshufd_Vdq_Wdq_Ib__pshufhw_Vdq_Wdq_Ib__pshuflq_Vdq_Wdq_Ib,
6943 /* 0x71 */ iemOp_Grp12,
6944 /* 0x72 */ iemOp_Grp13,
6945 /* 0x73 */ iemOp_Grp14,
6946 /* 0x74 */ iemOp_pcmpeqb_Pq_Qq__pcmpeqb_Vdq_Wdq,
6947 /* 0x75 */ iemOp_pcmpeqw_Pq_Qq__pcmpeqw_Vdq_Wdq,
6948 /* 0x76 */ iemOp_pcmped_Pq_Qq__pcmpeqd_Vdq_Wdq,
6949 /* 0x77 */ iemOp_emms,
6950 /* 0x78 */ iemOp_vmread_AmdGrp17,
6951 /* 0x79 */ iemOp_vmwrite,
6952 /* 0x7a */ iemOp_Invalid,
6953 /* 0x7b */ iemOp_Invalid,
6954 /* 0x7c */ iemOp_haddpd_Vdp_Wpd__haddps_Vps_Wps,
6955 /* 0x7d */ iemOp_hsubpd_Vpd_Wpd__hsubps_Vps_Wps,
6956 /* 0x7e */ iemOp_movd_q_Ey_Pd__movd_q_Ey_Vy__movq_Vq_Wq,
6957 /* 0x7f */ iemOp_movq_Qq_Pq__movq_movdqa_Wdq_Vdq__movdqu_Wdq_Vdq,
6958 /* 0x80 */ iemOp_jo_Jv,
6959 /* 0x81 */ iemOp_jno_Jv,
6960 /* 0x82 */ iemOp_jc_Jv,
6961 /* 0x83 */ iemOp_jnc_Jv,
6962 /* 0x84 */ iemOp_je_Jv,
6963 /* 0x85 */ iemOp_jne_Jv,
6964 /* 0x86 */ iemOp_jbe_Jv,
6965 /* 0x87 */ iemOp_jnbe_Jv,
6966 /* 0x88 */ iemOp_js_Jv,
6967 /* 0x89 */ iemOp_jns_Jv,
6968 /* 0x8a */ iemOp_jp_Jv,
6969 /* 0x8b */ iemOp_jnp_Jv,
6970 /* 0x8c */ iemOp_jl_Jv,
6971 /* 0x8d */ iemOp_jnl_Jv,
6972 /* 0x8e */ iemOp_jle_Jv,
6973 /* 0x8f */ iemOp_jnle_Jv,
6974 /* 0x90 */ iemOp_seto_Eb,
6975 /* 0x91 */ iemOp_setno_Eb,
6976 /* 0x92 */ iemOp_setc_Eb,
6977 /* 0x93 */ iemOp_setnc_Eb,
6978 /* 0x94 */ iemOp_sete_Eb,
6979 /* 0x95 */ iemOp_setne_Eb,
6980 /* 0x96 */ iemOp_setbe_Eb,
6981 /* 0x97 */ iemOp_setnbe_Eb,
6982 /* 0x98 */ iemOp_sets_Eb,
6983 /* 0x99 */ iemOp_setns_Eb,
6984 /* 0x9a */ iemOp_setp_Eb,
6985 /* 0x9b */ iemOp_setnp_Eb,
6986 /* 0x9c */ iemOp_setl_Eb,
6987 /* 0x9d */ iemOp_setnl_Eb,
6988 /* 0x9e */ iemOp_setle_Eb,
6989 /* 0x9f */ iemOp_setnle_Eb,
6990 /* 0xa0 */ iemOp_push_fs,
6991 /* 0xa1 */ iemOp_pop_fs,
6992 /* 0xa2 */ iemOp_cpuid,
6993 /* 0xa3 */ iemOp_bt_Ev_Gv,
6994 /* 0xa4 */ iemOp_shld_Ev_Gv_Ib,
6995 /* 0xa5 */ iemOp_shld_Ev_Gv_CL,
6996 /* 0xa6 */ iemOp_Invalid,
6997 /* 0xa7 */ iemOp_Invalid,
6998 /* 0xa8 */ iemOp_push_gs,
6999 /* 0xa9 */ iemOp_pop_gs,
7000 /* 0xaa */ iemOp_rsm,
7001 /* 0xab */ iemOp_bts_Ev_Gv,
7002 /* 0xac */ iemOp_shrd_Ev_Gv_Ib,
7003 /* 0xad */ iemOp_shrd_Ev_Gv_CL,
7004 /* 0xae */ iemOp_Grp15,
7005 /* 0xaf */ iemOp_imul_Gv_Ev,
7006 /* 0xb0 */ iemOp_cmpxchg_Eb_Gb,
7007 /* 0xb1 */ iemOp_cmpxchg_Ev_Gv,
7008 /* 0xb2 */ iemOp_lss_Gv_Mp,
7009 /* 0xb3 */ iemOp_btr_Ev_Gv,
7010 /* 0xb4 */ iemOp_lfs_Gv_Mp,
7011 /* 0xb5 */ iemOp_lgs_Gv_Mp,
7012 /* 0xb6 */ iemOp_movzx_Gv_Eb,
7013 /* 0xb7 */ iemOp_movzx_Gv_Ew,
7014 /* 0xb8 */ iemOp_popcnt_Gv_Ev_jmpe,
7015 /* 0xb9 */ iemOp_Grp10,
7016 /* 0xba */ iemOp_Grp8,
7017 /* 0xbd */ iemOp_btc_Ev_Gv,
7018 /* 0xbc */ iemOp_bsf_Gv_Ev,
7019 /* 0xbd */ iemOp_bsr_Gv_Ev,
7020 /* 0xbe */ iemOp_movsx_Gv_Eb,
7021 /* 0xbf */ iemOp_movsx_Gv_Ew,
7022 /* 0xc0 */ iemOp_xadd_Eb_Gb,
7023 /* 0xc1 */ iemOp_xadd_Ev_Gv,
7024 /* 0xc2 */ iemOp_cmpps_Vps_Wps_Ib__cmppd_Vpd_Wpd_Ib__cmpss_Vss_Wss_Ib__cmpsd_Vsd_Wsd_Ib,
7025 /* 0xc3 */ iemOp_movnti_My_Gy,
7026 /* 0xc4 */ iemOp_pinsrw_Pq_Ry_Mw_Ib__pinsrw_Vdq_Ry_Mw_Ib,
7027 /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib__pextrw_Gd_Udq_Ib,
7028 /* 0xc6 */ iemOp_shufps_Vps_Wps_Ib__shufdp_Vpd_Wpd_Ib,
7029 /* 0xc7 */ iemOp_Grp9,
7030 /* 0xc8 */ iemOp_bswap_rAX_r8,
7031 /* 0xc9 */ iemOp_bswap_rCX_r9,
7032 /* 0xca */ iemOp_bswap_rDX_r10,
7033 /* 0xcb */ iemOp_bswap_rBX_r11,
7034 /* 0xcc */ iemOp_bswap_rSP_r12,
7035 /* 0xcd */ iemOp_bswap_rBP_r13,
7036 /* 0xce */ iemOp_bswap_rSI_r14,
7037 /* 0xcf */ iemOp_bswap_rDI_r15,
7038 /* 0xd0 */ iemOp_addsubpd_Vpd_Wpd__addsubps_Vps_Wps,
7039 /* 0xd1 */ iemOp_psrlw_Pp_Qp__psrlw_Vdp_Wdq,
7040 /* 0xd2 */ iemOp_psrld_Pq_Qq__psrld_Vdq_Wdq,
7041 /* 0xd3 */ iemOp_psrlq_Pq_Qq__psrlq_Vdq_Wdq,
7042 /* 0xd4 */ iemOp_paddq_Pq_Qq__paddq_Vdq_Wdq,
7043 /* 0xd5 */ iemOp_pmulq_Pq_Qq__pmullw_Vdq_Wdq,
7044 /* 0xd6 */ iemOp_movq_Wq_Vq__movq2dq_Vdq_Nq__movdq2q_Pq_Uq,
7045 /* 0xd7 */ iemOp_pmovmskb_Gd_Nq__pmovmskb_Gd_Udq,
7046 /* 0xd8 */ iemOp_psubusb_Pq_Qq__psubusb_Vdq_Wdq,
7047 /* 0xd9 */ iemOp_psubusw_Pq_Qq__psubusw_Vdq_Wdq,
7048 /* 0xda */ iemOp_pminub_Pq_Qq__pminub_Vdq_Wdq,
7049 /* 0xdb */ iemOp_pand_Pq_Qq__pand_Vdq_Wdq,
7050 /* 0xdc */ iemOp_paddusb_Pq_Qq__paddusb_Vdq_Wdq,
7051 /* 0xdd */ iemOp_paddusw_Pq_Qq__paddusw_Vdq_Wdq,
7052 /* 0xde */ iemOp_pmaxub_Pq_Qq__pamxub_Vdq_Wdq,
7053 /* 0xdf */ iemOp_pandn_Pq_Qq__pandn_Vdq_Wdq,
7054 /* 0xe0 */ iemOp_pavgb_Pq_Qq__pavgb_Vdq_Wdq,
7055 /* 0xe1 */ iemOp_psraw_Pq_Qq__psraw_Vdq_Wdq,
7056 /* 0xe2 */ iemOp_psrad_Pq_Qq__psrad_Vdq_Wdq,
7057 /* 0xe3 */ iemOp_pavgw_Pq_Qq__pavgw_Vdq_Wdq,
7058 /* 0xe4 */ iemOp_pmulhuw_Pq_Qq__pmulhuw_Vdq_Wdq,
7059 /* 0xe5 */ iemOp_pmulhw_Pq_Qq__pmulhw_Vdq_Wdq,
7060 /* 0xe6 */ iemOp_cvttpd2dq_Vdq_Wdp__cvtdq2pd_Vdq_Wpd__cvtpd2dq_Vdq_Wpd,
7061 /* 0xe7 */ iemOp_movntq_Mq_Pq__movntdq_Mdq_Vdq,
7062 /* 0xe8 */ iemOp_psubsb_Pq_Qq__psubsb_Vdq_Wdq,
7063 /* 0xe9 */ iemOp_psubsw_Pq_Qq__psubsw_Vdq_Wdq,
7064 /* 0xea */ iemOp_pminsw_Pq_Qq__pminsw_Vdq_Wdq,
7065 /* 0xeb */ iemOp_por_Pq_Qq__por_Vdq_Wdq,
7066 /* 0xec */ iemOp_paddsb_Pq_Qq__paddsb_Vdq_Wdq,
7067 /* 0xed */ iemOp_paddsw_Pq_Qq__paddsw_Vdq_Wdq,
7068 /* 0xee */ iemOp_pmaxsw_Pq_Qq__pmaxsw_Vdq_Wdq,
7069 /* 0xef */ iemOp_pxor_Pq_Qq__pxor_Vdq_Wdq,
7070 /* 0xf0 */ iemOp_lddqu_Vdq_Mdq,
7071 /* 0xf1 */ iemOp_psllw_Pq_Qq__pslw_Vdq_Wdq,
7072 /* 0xf2 */ iemOp_psld_Pq_Qq__pslld_Vdq_Wdq,
7073 /* 0xf3 */ iemOp_psllq_Pq_Qq__pslq_Vdq_Wdq,
7074 /* 0xf4 */ iemOp_pmuludq_Pq_Qq__pmuludq_Vdq_Wdq,
7075 /* 0xf5 */ iemOp_pmaddwd_Pq_Qq__pmaddwd_Vdq_Wdq,
7076 /* 0xf6 */ iemOp_psadbw_Pq_Qq__psadbw_Vdq_Wdq,
7077 /* 0xf7 */ iemOp_maskmovq_Pq_Nq__maskmovdqu_Vdq_Udq,
7078 /* 0xf8 */ iemOp_psubb_Pq_Qq_psubb_Vdq_Wdq,
7079 /* 0xf9 */ iemOp_psubw_Pq_Qq__psubw_Vdq_Wdq,
7080 /* 0xfa */ iemOp_psubd_Pq_Qq__psubd_Vdq_Wdq,
7081 /* 0xfb */ iemOp_psubq_Pq_Qq__psbuq_Vdq_Wdq,
7082 /* 0xfc */ iemOp_paddb_Pq_Qq__paddb_Vdq_Wdq,
7083 /* 0xfd */ iemOp_paddw_Pq_Qq__paddw_Vdq_Wdq,
7084 /* 0xfe */ iemOp_paddd_Pq_Qq__paddd_Vdq_Wdq,
7085 /* 0xff */ iemOp_Invalid
7086};
7087
7088/** @} */
7089
7090
7091/** @name One byte opcodes.
7092 *
7093 * @{
7094 */
7095
/** Opcode 0x00 - ADD r/m8, r8. */
FNIEMOP_DEF(iemOp_add_Eb_Gb)
{
    IEMOP_MNEMONIC("add Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_add);
}


/** Opcode 0x01 - ADD r/m16/32/64, r16/32/64. */
FNIEMOP_DEF(iemOp_add_Ev_Gv)
{
    IEMOP_MNEMONIC("add Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_add);
}


/** Opcode 0x02 - ADD r8, r/m8. */
FNIEMOP_DEF(iemOp_add_Gb_Eb)
{
    IEMOP_MNEMONIC("add Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_add);
}


/** Opcode 0x03 - ADD r16/32/64, r/m16/32/64. */
FNIEMOP_DEF(iemOp_add_Gv_Ev)
{
    IEMOP_MNEMONIC("add Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_add);
}


/** Opcode 0x04 - ADD AL, imm8. */
FNIEMOP_DEF(iemOp_add_Al_Ib)
{
    IEMOP_MNEMONIC("add al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_add);
}


/** Opcode 0x05 - ADD rAX, imm16/32 (Iz). */
FNIEMOP_DEF(iemOp_add_eAX_Iz)
{
    IEMOP_MNEMONIC("add rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_add);
}
7142
7143
/** Opcode 0x06 - PUSH ES. */
FNIEMOP_DEF(iemOp_push_ES)
{
    IEMOP_MNEMONIC("push es");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_ES);
}


/** Opcode 0x07 - POP ES; invalid in 64-bit mode.
 * NOTE(review): the sibling pop ss/ds handlers do the lock-prefix check before
 * the 64-bit mode check; the order is reversed here — presumably equivalent,
 * but worth confirming. */
FNIEMOP_DEF(iemOp_pop_ES)
{
    IEMOP_MNEMONIC("pop es");
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_ES, pIemCpu->enmEffOpSize);
}
7160
7161
/** Opcode 0x08 - OR r/m8, r8. AF is undefined after OR. */
FNIEMOP_DEF(iemOp_or_Eb_Gb)
{
    IEMOP_MNEMONIC("or  Eb,Gb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_or);
}
7169
7170
7171/** Opcode 0x09. */
7172FNIEMOP_DEF(iemOp_or_Ev_Gv)
7173{
7174 IEMOP_MNEMONIC("or Ev,Gv ");
7175 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7176 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_or);
7177}
7178
7179
/** Opcode 0x0a - OR r8, r/m8. AF is undefined after OR. */
FNIEMOP_DEF(iemOp_or_Gb_Eb)
{
    IEMOP_MNEMONIC("or  Gb,Eb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_or);
}


/** Opcode 0x0b - OR r16/32/64, r/m16/32/64. AF is undefined after OR. */
FNIEMOP_DEF(iemOp_or_Gv_Ev)
{
    IEMOP_MNEMONIC("or  Gv,Ev");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_or);
}


/** Opcode 0x0c - OR AL, imm8. AF is undefined after OR. */
FNIEMOP_DEF(iemOp_or_Al_Ib)
{
    IEMOP_MNEMONIC("or  al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_or);
}


/** Opcode 0x0d - OR rAX, imm16/32 (Iz). AF is undefined after OR. */
FNIEMOP_DEF(iemOp_or_eAX_Iz)
{
    IEMOP_MNEMONIC("or  rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_or);
}
7214
7215
/** Opcode 0x0e - PUSH CS. */
FNIEMOP_DEF(iemOp_push_CS)
{
    IEMOP_MNEMONIC("push cs");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_CS);
}


/** Opcode 0x0f - two-byte opcode escape: dispatch the next byte via the
 *  two-byte opcode map. */
FNIEMOP_DEF(iemOp_2byteEscape)
{
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    /** @todo PUSH CS on 8086, undefined on 80186. */
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL(g_apfnTwoByteMap[b]);
}
7232
/** Opcode 0x10 - ADC r/m8, r8 (add with carry). */
FNIEMOP_DEF(iemOp_adc_Eb_Gb)
{
    IEMOP_MNEMONIC("adc Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_adc);
}


/** Opcode 0x11 - ADC r/m16/32/64, r16/32/64. */
FNIEMOP_DEF(iemOp_adc_Ev_Gv)
{
    IEMOP_MNEMONIC("adc Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_adc);
}


/** Opcode 0x12 - ADC r8, r/m8. */
FNIEMOP_DEF(iemOp_adc_Gb_Eb)
{
    IEMOP_MNEMONIC("adc Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_adc);
}


/** Opcode 0x13 - ADC r16/32/64, r/m16/32/64. */
FNIEMOP_DEF(iemOp_adc_Gv_Ev)
{
    IEMOP_MNEMONIC("adc Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_adc);
}


/** Opcode 0x14 - ADC AL, imm8. */
FNIEMOP_DEF(iemOp_adc_Al_Ib)
{
    IEMOP_MNEMONIC("adc al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_adc);
}


/** Opcode 0x15 - ADC rAX, imm16/32 (Iz). */
FNIEMOP_DEF(iemOp_adc_eAX_Iz)
{
    IEMOP_MNEMONIC("adc rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_adc);
}
7279
7280
/** Opcode 0x16 - PUSH SS. */
FNIEMOP_DEF(iemOp_push_SS)
{
    IEMOP_MNEMONIC("push ss");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_SS);
}


/** Opcode 0x17 - POP SS; invalid in 64-bit mode. */
FNIEMOP_DEF(iemOp_pop_SS)
{
    IEMOP_MNEMONIC("pop ss"); /** @todo implies instruction fusing? */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_SS, pIemCpu->enmEffOpSize);
}
7297
7298
/** Opcode 0x18 - SBB r/m8, r8 (subtract with borrow). */
FNIEMOP_DEF(iemOp_sbb_Eb_Gb)
{
    IEMOP_MNEMONIC("sbb Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sbb);
}


/** Opcode 0x19 - SBB r/m16/32/64, r16/32/64. */
FNIEMOP_DEF(iemOp_sbb_Ev_Gv)
{
    IEMOP_MNEMONIC("sbb Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sbb);
}


/** Opcode 0x1a - SBB r8, r/m8. */
FNIEMOP_DEF(iemOp_sbb_Gb_Eb)
{
    IEMOP_MNEMONIC("sbb Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sbb);
}


/** Opcode 0x1b - SBB r16/32/64, r/m16/32/64. */
FNIEMOP_DEF(iemOp_sbb_Gv_Ev)
{
    IEMOP_MNEMONIC("sbb Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sbb);
}


/** Opcode 0x1c - SBB AL, imm8. */
FNIEMOP_DEF(iemOp_sbb_Al_Ib)
{
    IEMOP_MNEMONIC("sbb al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sbb);
}


/** Opcode 0x1d - SBB rAX, imm16/32 (Iz). */
FNIEMOP_DEF(iemOp_sbb_eAX_Iz)
{
    IEMOP_MNEMONIC("sbb rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sbb);
}
7345
7346
/** Opcode 0x1e - PUSH DS. */
FNIEMOP_DEF(iemOp_push_DS)
{
    IEMOP_MNEMONIC("push ds");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_DS);
}


/** Opcode 0x1f - POP DS; invalid in 64-bit mode. */
FNIEMOP_DEF(iemOp_pop_DS)
{
    IEMOP_MNEMONIC("pop ds");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_DS, pIemCpu->enmEffOpSize);
}
7363
7364
/** Opcode 0x20 - AND r/m8, r8. AF is undefined after AND. */
FNIEMOP_DEF(iemOp_and_Eb_Gb)
{
    IEMOP_MNEMONIC("and Eb,Gb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_and);
}


/** Opcode 0x21 - AND r/m16/32/64, r16/32/64. AF is undefined after AND. */
FNIEMOP_DEF(iemOp_and_Ev_Gv)
{
    IEMOP_MNEMONIC("and Ev,Gv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_and);
}


/** Opcode 0x22 - AND r8, r/m8. AF is undefined after AND. */
FNIEMOP_DEF(iemOp_and_Gb_Eb)
{
    IEMOP_MNEMONIC("and Gb,Eb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_and);
}


/** Opcode 0x23 - AND r16/32/64, r/m16/32/64. AF is undefined after AND. */
FNIEMOP_DEF(iemOp_and_Gv_Ev)
{
    IEMOP_MNEMONIC("and Gv,Ev");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_and);
}


/** Opcode 0x24 - AND AL, imm8. AF is undefined after AND. */
FNIEMOP_DEF(iemOp_and_Al_Ib)
{
    IEMOP_MNEMONIC("and al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_and);
}


/** Opcode 0x25 - AND rAX, imm16/32 (Iz). AF is undefined after AND. */
FNIEMOP_DEF(iemOp_and_eAX_Iz)
{
    IEMOP_MNEMONIC("and rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_and);
}
7417
7418
/** Opcode 0x26 - ES segment-override prefix: record the prefix and keep
 *  decoding with the next opcode byte. */
FNIEMOP_DEF(iemOp_seg_ES)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg es");
    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_ES;
    pIemCpu->iEffSeg = X86_SREG_ES;

    /* Fetch and dispatch the next opcode byte. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
7429
7430
/** Opcode 0x27 - DAA (decimal adjust AL after addition); invalid in
 *  64-bit mode. */
FNIEMOP_DEF(iemOp_daa)
{
    IEMOP_MNEMONIC("daa AL");
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF); /* OF is undefined after DAA. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_daa);
}
7440
7441
/** Opcode 0x28 - SUB r/m8, r8. */
FNIEMOP_DEF(iemOp_sub_Eb_Gb)
{
    IEMOP_MNEMONIC("sub Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sub);
}


/** Opcode 0x29 - SUB r/m16/32/64, r16/32/64. */
FNIEMOP_DEF(iemOp_sub_Ev_Gv)
{
    IEMOP_MNEMONIC("sub Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sub);
}


/** Opcode 0x2a - SUB r8, r/m8. */
FNIEMOP_DEF(iemOp_sub_Gb_Eb)
{
    IEMOP_MNEMONIC("sub Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sub);
}


/** Opcode 0x2b - SUB r16/32/64, r/m16/32/64. */
FNIEMOP_DEF(iemOp_sub_Gv_Ev)
{
    IEMOP_MNEMONIC("sub Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sub);
}


/** Opcode 0x2c - SUB AL, imm8. */
FNIEMOP_DEF(iemOp_sub_Al_Ib)
{
    IEMOP_MNEMONIC("sub al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sub);
}


/** Opcode 0x2d - SUB rAX, imm16/32 (Iz). */
FNIEMOP_DEF(iemOp_sub_eAX_Iz)
{
    IEMOP_MNEMONIC("sub rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sub);
}
7488
7489
/** Opcode 0x2e - CS segment-override prefix: record the prefix and keep
 *  decoding with the next opcode byte. */
FNIEMOP_DEF(iemOp_seg_CS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg cs");
    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_CS;
    pIemCpu->iEffSeg = X86_SREG_CS;

    /* Fetch and dispatch the next opcode byte. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
7500
7501
/** Opcode 0x2f - DAS (decimal adjust AL after subtraction); invalid in
 *  64-bit mode. */
FNIEMOP_DEF(iemOp_das)
{
    IEMOP_MNEMONIC("das AL");
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF); /* OF is undefined after DAS. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_das);
}
7511
7512
/** Opcode 0x30 - XOR r/m8, r8. AF is undefined after XOR. */
FNIEMOP_DEF(iemOp_xor_Eb_Gb)
{
    IEMOP_MNEMONIC("xor Eb,Gb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_xor);
}


/** Opcode 0x31 - XOR r/m16/32/64, r16/32/64. AF is undefined after XOR. */
FNIEMOP_DEF(iemOp_xor_Ev_Gv)
{
    IEMOP_MNEMONIC("xor Ev,Gv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_xor);
}


/** Opcode 0x32 - XOR r8, r/m8. AF is undefined after XOR. */
FNIEMOP_DEF(iemOp_xor_Gb_Eb)
{
    IEMOP_MNEMONIC("xor Gb,Eb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_xor);
}


/** Opcode 0x33 - XOR r16/32/64, r/m16/32/64. AF is undefined after XOR. */
FNIEMOP_DEF(iemOp_xor_Gv_Ev)
{
    IEMOP_MNEMONIC("xor Gv,Ev");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_xor);
}


/** Opcode 0x34 - XOR AL, imm8. AF is undefined after XOR. */
FNIEMOP_DEF(iemOp_xor_Al_Ib)
{
    IEMOP_MNEMONIC("xor al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_xor);
}


/** Opcode 0x35 - XOR rAX, imm16/32 (Iz). AF is undefined after XOR. */
FNIEMOP_DEF(iemOp_xor_eAX_Iz)
{
    IEMOP_MNEMONIC("xor rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_xor);
}
7565
7566
/** Opcode 0x36 - SS segment-override prefix: record the prefix and keep
 *  decoding with the next opcode byte. */
FNIEMOP_DEF(iemOp_seg_SS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ss");
    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_SS;
    pIemCpu->iEffSeg = X86_SREG_SS;

    /* Fetch and dispatch the next opcode byte. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}


/** Opcode 0x37 - AAA; not implemented yet (stub). */
FNIEMOP_STUB(iemOp_aaa);
7581
7582
/** Opcode 0x38 - CMP r/m8, r8 (compare, flags only). */
FNIEMOP_DEF(iemOp_cmp_Eb_Gb)
{
    IEMOP_MNEMONIC("cmp Eb,Gb");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo do we have to decode the whole instruction first? */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_cmp);
}


/** Opcode 0x39 - CMP r/m16/32/64, r16/32/64. */
FNIEMOP_DEF(iemOp_cmp_Ev_Gv)
{
    IEMOP_MNEMONIC("cmp Ev,Gv");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo do we have to decode the whole instruction first? */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_cmp);
}


/** Opcode 0x3a - CMP r8, r/m8.
 * NOTE(review): unlike 0x38/0x39, the 0x3a-0x3d variants do no explicit
 * lock-prefix rejection here — confirm it is handled elsewhere. */
FNIEMOP_DEF(iemOp_cmp_Gb_Eb)
{
    IEMOP_MNEMONIC("cmp Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_cmp);
}


/** Opcode 0x3b - CMP r16/32/64, r/m16/32/64. */
FNIEMOP_DEF(iemOp_cmp_Gv_Ev)
{
    IEMOP_MNEMONIC("cmp Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_cmp);
}


/** Opcode 0x3c - CMP AL, imm8. */
FNIEMOP_DEF(iemOp_cmp_Al_Ib)
{
    IEMOP_MNEMONIC("cmp al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_cmp);
}


/** Opcode 0x3d - CMP rAX, imm16/32 (Iz). */
FNIEMOP_DEF(iemOp_cmp_eAX_Iz)
{
    IEMOP_MNEMONIC("cmp rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_cmp);
}
7631
7632
/** Opcode 0x3e - DS segment-override prefix: record the prefix and keep
 *  decoding with the next opcode byte. */
FNIEMOP_DEF(iemOp_seg_DS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ds");
    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_DS;
    pIemCpu->iEffSeg = X86_SREG_DS;

    /* Fetch and dispatch the next opcode byte. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}


/** Opcode 0x3f - AAS; not implemented yet (stub). */
FNIEMOP_STUB(iemOp_aas);
7647
/**
 * Common 'inc/dec/not/neg register' helper.
 *
 * Dispatches on the current effective operand size and applies the
 * size-specific worker from @a pImpl to the given general-purpose register,
 * updating EFLAGS by reference.
 *
 * @param   pImpl   The unary operator implementation table (16/32/64-bit
 *                  workers).
 * @param   iReg    The general register index (X86_GREG_xXX).
 */
FNIEMOP_DEF_2(iemOpCommonUnaryGReg, PCIEMOPUNARYSIZES, pImpl, uint8_t, iReg)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint16_t *, pu16Dst, 0);
            IEM_MC_ARG(uint32_t *, pEFlags, 1);
            IEM_MC_REF_GREG_U16(pu16Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
            IEM_MC_ARG(uint32_t *, pEFlags, 1);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
            /* Writing a 32-bit GPR zeroes the upper half of the 64-bit register. */
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint64_t *, pu64Dst, 0);
            IEM_MC_ARG(uint32_t *, pEFlags, 1);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
    }
    /* Not reached: enmEffOpSize is always one of the three cases above;
       this return keeps the compiler happy. */
    return VINF_SUCCESS;
}
7692
7693
/** Opcode 0x40 - INC eAX; reinterpreted as the plain REX prefix in
 *  64-bit mode. */
FNIEMOP_DEF(iemOp_inc_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eAX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xAX);
}


/** Opcode 0x41 - INC eCX; REX.B prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_inc_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.b");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B;
        pIemCpu->uRexB = 1 << 3; /* Extends the r/m, base or opcode register field. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eCX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xCX);
}


/** Opcode 0x42 - INC eDX; REX.X prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_inc_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.x");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X;
        pIemCpu->uRexIndex = 1 << 3; /* Extends the SIB index register field. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eDX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDX);
}



/** Opcode 0x43 - INC eBX; REX.BX prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_inc_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bx");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pIemCpu->uRexB     = 1 << 3;
        pIemCpu->uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eBX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBX);
}


/** Opcode 0x44 - INC eSP; REX.R prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_inc_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.r");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R;
        pIemCpu->uRexReg = 1 << 3; /* Extends the ModR/M reg field. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eSP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSP);
}


/** Opcode 0x45 - INC eBP; REX.RB prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_inc_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rb");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B;
        pIemCpu->uRexReg = 1 << 3;
        pIemCpu->uRexB   = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eBP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBP);
}


/** Opcode 0x46 - INC eSI; REX.RX prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_inc_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rx");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X;
        pIemCpu->uRexReg   = 1 << 3;
        pIemCpu->uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eSI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSI);
}


/** Opcode 0x47 - INC eDI; REX.RBX prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_inc_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbx");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pIemCpu->uRexReg   = 1 << 3;
        pIemCpu->uRexB     = 1 << 3;
        pIemCpu->uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eDI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDI);
}
7865
7866
7867/** Opcode 0x48. */
7868FNIEMOP_DEF(iemOp_dec_eAX)
7869{
7870 /*
7871 * This is a REX prefix in 64-bit mode.
7872 */
7873 if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
7874 {
7875 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.w");
7876 pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_SIZE_REX_W;
7877 iemRecalEffOpSize(pIemCpu);
7878
7879 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
7880 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
7881 }
7882
7883 IEMOP_MNEMONIC("dec eAX");
7884 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xAX);
7885}
7886
7887
7888/** Opcode 0x49. */
7889FNIEMOP_DEF(iemOp_dec_eCX)
7890{
7891 /*
7892 * This is a REX prefix in 64-bit mode.
7893 */
7894 if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
7895 {
7896 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bw");
7897 pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
7898 pIemCpu->uRexB = 1 << 3;
7899 iemRecalEffOpSize(pIemCpu);
7900
7901 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
7902 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
7903 }
7904
7905 IEMOP_MNEMONIC("dec eCX");
7906 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xCX);
7907}
7908
7909
7910/** Opcode 0x4a. */
7911FNIEMOP_DEF(iemOp_dec_eDX)
7912{
7913 /*
7914 * This is a REX prefix in 64-bit mode.
7915 */
7916 if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
7917 {
7918 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.xw");
7919 pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
7920 pIemCpu->uRexIndex = 1 << 3;
7921 iemRecalEffOpSize(pIemCpu);
7922
7923 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
7924 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
7925 }
7926
7927 IEMOP_MNEMONIC("dec eDX");
7928 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDX);
7929}
7930
7931
7932/** Opcode 0x4b. */
7933FNIEMOP_DEF(iemOp_dec_eBX)
7934{
7935 /*
7936 * This is a REX prefix in 64-bit mode.
7937 */
7938 if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
7939 {
7940 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bxw");
7941 pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
7942 pIemCpu->uRexB = 1 << 3;
7943 pIemCpu->uRexIndex = 1 << 3;
7944 iemRecalEffOpSize(pIemCpu);
7945
7946 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
7947 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
7948 }
7949
7950 IEMOP_MNEMONIC("dec eBX");
7951 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBX);
7952}
7953
7954
7955/** Opcode 0x4c. */
7956FNIEMOP_DEF(iemOp_dec_eSP)
7957{
7958 /*
7959 * This is a REX prefix in 64-bit mode.
7960 */
7961 if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
7962 {
7963 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rw");
7964 pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_SIZE_REX_W;
7965 pIemCpu->uRexReg = 1 << 3;
7966 iemRecalEffOpSize(pIemCpu);
7967
7968 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
7969 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
7970 }
7971
7972 IEMOP_MNEMONIC("dec eSP");
7973 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSP);
7974}
7975
7976
/** Opcode 0x4d - DEC eBP in 16/32-bit mode; the REX.RBW prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_dec_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbw");
        /* REX.R extends the ModRM reg field, REX.B extends r/m & base, REX.W -> 64-bit operands. */
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexReg = 1 << 3;
        pIemCpu->uRexB   = 1 << 3;
        iemRecalEffOpSize(pIemCpu); /* REX.W changes the effective operand size. */

        /* Fetch and dispatch the opcode byte following the prefix. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eBP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBP);
}
7998
7999
/** Opcode 0x4e - DEC eSI in 16/32-bit mode; the REX.RXW prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_dec_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rxw");
        /* REX.R extends the ModRM reg field, REX.X extends the SIB index, REX.W -> 64-bit operands. */
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexReg   = 1 << 3;
        pIemCpu->uRexIndex = 1 << 3;
        iemRecalEffOpSize(pIemCpu); /* REX.W changes the effective operand size. */

        /* Fetch and dispatch the opcode byte following the prefix. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eSI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSI);
}
8021
8022
/** Opcode 0x4f - DEC eDI in 16/32-bit mode; the REX.RBXW prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_dec_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbxw");
        /* All four REX bits set: reg, r/m & base, SIB index, and 64-bit operand size. */
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexReg   = 1 << 3;
        pIemCpu->uRexB     = 1 << 3;
        pIemCpu->uRexIndex = 1 << 3;
        iemRecalEffOpSize(pIemCpu); /* REX.W changes the effective operand size. */

        /* Fetch and dispatch the opcode byte following the prefix. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eDI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDI);
}
8045
8046
/**
 * Common 'push register' helper.
 *
 * In 64-bit mode the register index is extended with REX.B and the default
 * operand size is forced to 64-bit; an operand-size prefix then selects
 * 16-bit (there is no 32-bit push in long mode).
 *
 * @param   iReg    The general register index (X86_GREG_XXX, low 3 bits;
 *                  REX.B is ORed in here for 64-bit mode).
 */
FNIEMOP_DEF_1(iemOpCommonPushGReg, uint8_t, iReg)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        iReg |= pIemCpu->uRexB;
        pIemCpu->enmDefOpSize = IEMMODE_64BIT;
        pIemCpu->enmEffOpSize = !(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    /* Fetch the register value and push it, at the effective operand size. */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Value);
            IEM_MC_FETCH_GREG_U16(u16Value, iReg);
            IEM_MC_PUSH_U16(u16Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U32(u32Value, iReg);
            IEM_MC_PUSH_U32(u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U64(u64Value, iReg);
            IEM_MC_PUSH_U64(u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
8092
8093
/** Opcode 0x50 - PUSH eAX/rAX (r8 with REX.B). */
FNIEMOP_DEF(iemOp_push_eAX)
{
    IEMOP_MNEMONIC("push rAX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xAX);
}
8100
8101
/** Opcode 0x51 - PUSH eCX/rCX (r9 with REX.B). */
FNIEMOP_DEF(iemOp_push_eCX)
{
    IEMOP_MNEMONIC("push rCX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xCX);
}
8108
8109
/** Opcode 0x52 - PUSH eDX/rDX (r10 with REX.B). */
FNIEMOP_DEF(iemOp_push_eDX)
{
    IEMOP_MNEMONIC("push rDX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDX);
}
8116
8117
/** Opcode 0x53 - PUSH eBX/rBX (r11 with REX.B). */
FNIEMOP_DEF(iemOp_push_eBX)
{
    IEMOP_MNEMONIC("push rBX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBX);
}
8124
8125
8126/** Opcode 0x54. */
8127FNIEMOP_DEF(iemOp_push_eSP)
8128{
8129 IEMOP_MNEMONIC("push rSP");
8130 if (IEM_GET_TARGET_CPU(pIemCpu) == IEMTARGETCPU_8086)
8131 {
8132 IEM_MC_BEGIN(0, 1);
8133 IEM_MC_LOCAL(uint16_t, u16Value);
8134 IEM_MC_FETCH_GREG_U16(u16Value, X86_GREG_xSP);
8135 IEM_MC_SUB_LOCAL_U16(u16Value, 2);
8136 IEM_MC_PUSH_U16(u16Value);
8137 IEM_MC_ADVANCE_RIP();
8138 IEM_MC_END();
8139 }
8140 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSP);
8141}
8142
8143
/** Opcode 0x55 - PUSH eBP/rBP (r13 with REX.B). */
FNIEMOP_DEF(iemOp_push_eBP)
{
    IEMOP_MNEMONIC("push rBP");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBP);
}
8150
8151
/** Opcode 0x56 - PUSH eSI/rSI (r14 with REX.B). */
FNIEMOP_DEF(iemOp_push_eSI)
{
    IEMOP_MNEMONIC("push rSI");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSI);
}
8158
8159
/** Opcode 0x57 - PUSH eDI/rDI (r15 with REX.B). */
FNIEMOP_DEF(iemOp_push_eDI)
{
    IEMOP_MNEMONIC("push rDI");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDI);
}
8166
8167
/**
 * Common 'pop register' helper.
 *
 * In 64-bit mode the register index is extended with REX.B and the default
 * operand size is forced to 64-bit; an operand-size prefix then selects
 * 16-bit (there is no 32-bit pop in long mode).  The 'pop rSP' special case
 * is handled by the caller (see iemOp_pop_eSP).
 *
 * @param   iReg    The general register index (X86_GREG_XXX, low 3 bits;
 *                  REX.B is ORed in here for 64-bit mode).
 */
FNIEMOP_DEF_1(iemOpCommonPopGReg, uint8_t, iReg)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        iReg |= pIemCpu->uRexB;
        pIemCpu->enmDefOpSize = IEMMODE_64BIT;
        pIemCpu->enmEffOpSize = !(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    /* Pop directly into a reference to the destination register. */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t *, pu16Dst);
            IEM_MC_REF_GREG_U16(pu16Dst, iReg);
            IEM_MC_POP_U16(pu16Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t *, pu32Dst);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            IEM_MC_POP_U32(pu32Dst);
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /** @todo testcase*/
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t *, pu64Dst);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_POP_U64(pu64Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
8214
8215
/** Opcode 0x58 - POP eAX/rAX (r8 with REX.B). */
FNIEMOP_DEF(iemOp_pop_eAX)
{
    IEMOP_MNEMONIC("pop rAX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xAX);
}
8222
8223
/** Opcode 0x59 - POP eCX/rCX (r9 with REX.B). */
FNIEMOP_DEF(iemOp_pop_eCX)
{
    IEMOP_MNEMONIC("pop rCX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xCX);
}
8230
8231
/** Opcode 0x5a - POP eDX/rDX (r10 with REX.B). */
FNIEMOP_DEF(iemOp_pop_eDX)
{
    IEMOP_MNEMONIC("pop rDX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDX);
}
8238
8239
/** Opcode 0x5b - POP eBX/rBX (r11 with REX.B). */
FNIEMOP_DEF(iemOp_pop_eBX)
{
    IEMOP_MNEMONIC("pop rBX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBX);
}
8246
8247
/** Opcode 0x5c - POP eSP/rSP (r12 with REX.B).
 *
 * POP SP needs special handling: the value is read from the old stack top and
 * the result *is* the new stack pointer, so the common pop-by-reference
 * worker cannot be used for SP itself.  With REX.B the destination is r12,
 * which has no such quirk and goes through the common worker.
 */
FNIEMOP_DEF(iemOp_pop_eSP)
{
    IEMOP_MNEMONIC("pop rSP");
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        if (pIemCpu->uRexB)
            return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSP);
        /* Same operand size defaulting as the common worker. */
        pIemCpu->enmDefOpSize = IEMMODE_64BIT;
        pIemCpu->enmEffOpSize = !(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
                           DISOPTYPE_HARMLESS | DISOPTYPE_DEFAULT_64_OP_SIZE | DISOPTYPE_REXB_EXTENDS_OPREG);
    /** @todo add testcase for this instruction. */
    /* Pop into a local first, then store it as the new SP/ESP/RSP. */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Dst);
            IEM_MC_POP_U16(&u16Dst); /** @todo not correct MC, fix later. */
            IEM_MC_STORE_GREG_U16(X86_GREG_xSP, u16Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Dst);
            IEM_MC_POP_U32(&u32Dst);
            IEM_MC_STORE_GREG_U32(X86_GREG_xSP, u32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Dst);
            IEM_MC_POP_U64(&u64Dst);
            IEM_MC_STORE_GREG_U64(X86_GREG_xSP, u64Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
8295
8296
/** Opcode 0x5d - POP eBP/rBP (r13 with REX.B). */
FNIEMOP_DEF(iemOp_pop_eBP)
{
    IEMOP_MNEMONIC("pop rBP");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBP);
}
8303
8304
/** Opcode 0x5e - POP eSI/rSI (r14 with REX.B). */
FNIEMOP_DEF(iemOp_pop_eSI)
{
    IEMOP_MNEMONIC("pop rSI");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSI);
}
8311
8312
/** Opcode 0x5f - POP eDI/rDI (r15 with REX.B). */
FNIEMOP_DEF(iemOp_pop_eDI)
{
    IEMOP_MNEMONIC("pop rDI");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDI);
}
8319
8320
/** Opcode 0x60 - PUSHA/PUSHAD.
 * Pushes all general registers; 80186+, invalid in 64-bit mode.
 * Deferred to a C implementation for the 16 and 32-bit operand sizes. */
FNIEMOP_DEF(iemOp_pusha)
{
    IEMOP_MNEMONIC("pusha");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_NO_64BIT();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_16);
    Assert(pIemCpu->enmEffOpSize == IEMMODE_32BIT); /* 64-bit mode was rejected above. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_32);
}
8332
8333
/** Opcode 0x61 - POPA/POPAD.
 * Pops all general registers; 80186+, invalid in 64-bit mode.
 * Deferred to a C implementation for the 16 and 32-bit operand sizes. */
FNIEMOP_DEF(iemOp_popa)
{
    IEMOP_MNEMONIC("popa");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_NO_64BIT();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_16);
    Assert(pIemCpu->enmEffOpSize == IEMMODE_32BIT); /* 64-bit mode was rejected above. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_32);
}
8345
8346
/** Opcode 0x62 - BOUND Gv,Ma (80186+); the name suggests it also doubles as
 *  the EVEX prefix byte on newer CPUs — unimplemented stub either way. */
FNIEMOP_STUB(iemOp_bound_Gv_Ma_evex);
// IEMOP_HLP_MIN_186();
8350
8351
/** Opcode 0x63 - ARPL Ew,Gw (non-64-bit modes).
 * 80286+, protected mode only (rejected in real and V86 mode).
 * Adjusts the RPL field of the selector in Ew using Gw. */
FNIEMOP_DEF(iemOp_arpl_Ew_Gw)
{
    IEMOP_MNEMONIC("arpl Ew,Gw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register */
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint16_t *,      pu16Dst, 0);
        IEM_MC_ARG(uint16_t,        u16Src,  1);
        IEM_MC_ARG(uint32_t *,      pEFlags, 2);

        IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* Memory: map the destination selector word read-write and commit
           both the word and EFLAGS after the worker has run. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *, pu16Dst,          0);
        IEM_MC_ARG(uint16_t,   u16Src,           1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;

}
8401
8402
/** Opcode 0x63 - MOVSXD Gv,Ev (64-bit mode).
 * Sign-extends a 32-bit source into a 64-bit destination register.
 * @note This is a weird one. It works like a regular move instruction if
 *       REX.W isn't set, at least according to AMD docs (rev 3.15, 2009-11).
 * @todo This definitely needs a testcase to verify the odd cases. */
FNIEMOP_DEF(iemOp_movsxd_Gv_Ev)
{
    Assert(pIemCpu->enmEffOpSize == IEMMODE_64BIT); /* Caller branched already . */

    IEMOP_MNEMONIC("movsxd Gv,Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register to register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, u64Value);
        /* Fetch the low 32 bits of r/m and sign-extend to 64 bits. */
        IEM_MC_FETCH_GREG_U32_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, u64Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Read a dword from memory and sign-extend it to 64 bits. */
        IEM_MC_FETCH_MEM_U32_SX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
8444
8445
/** Opcode 0x64 - FS segment override prefix (80386+). */
FNIEMOP_DEF(iemOp_seg_FS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg fs");
    IEMOP_HLP_MIN_386();

    /* Record the override and make FS the effective segment for the rest of
       this instruction, then continue decoding. */
    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_FS;
    pIemCpu->iEffSeg    = X86_SREG_FS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
8458
8459
/** Opcode 0x65 - GS segment override prefix (80386+). */
FNIEMOP_DEF(iemOp_seg_GS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg gs");
    IEMOP_HLP_MIN_386();

    /* Record the override and make GS the effective segment for the rest of
       this instruction, then continue decoding. */
    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_GS;
    pIemCpu->iEffSeg    = X86_SREG_GS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
8472
8473
/** Opcode 0x66 - operand-size override prefix (80386+). */
FNIEMOP_DEF(iemOp_op_size)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("op size");
    IEMOP_HLP_MIN_386();

    /* Flag the prefix and recompute the effective operand size, then
       continue decoding the rest of the instruction. */
    pIemCpu->fPrefixes |= IEM_OP_PRF_SIZE_OP;
    iemRecalEffOpSize(pIemCpu);

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
8486
8487
/** Opcode 0x67 - address-size override prefix (80386+). */
FNIEMOP_DEF(iemOp_addr_size)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("addr size");
    IEMOP_HLP_MIN_386();

    pIemCpu->fPrefixes |= IEM_OP_PRF_SIZE_ADDR;
    /* Toggle to the alternate addressing mode: 16<->32 in legacy modes,
       64->32 in long mode (there is no 16-bit addressing there). */
    switch (pIemCpu->enmDefAddrMode)
    {
        case IEMMODE_16BIT: pIemCpu->enmEffAddrMode = IEMMODE_32BIT; break;
        case IEMMODE_32BIT: pIemCpu->enmEffAddrMode = IEMMODE_16BIT; break;
        case IEMMODE_64BIT: pIemCpu->enmEffAddrMode = IEMMODE_32BIT; break;
        default: AssertFailed();
    }

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
8506
8507
/** Opcode 0x68 - PUSH Iz (80186+).
 * Pushes a word/dword immediate; in 64-bit mode a dword immediate is
 * sign-extended to 64 bits (64-bit is the default operand size here). */
FNIEMOP_DEF(iemOp_push_Iz)
{
    IEMOP_MNEMONIC("push Iz");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U16(u16Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U32(u32Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            /* The immediate is still 32 bits wide, sign-extended to 64. */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U64(u64Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* supplies the default: label */
    }
}
8552
8553
/** Opcode 0x69 - IMUL Gv,Ev,Iz (three-operand form, 80186+).
 * Multiplies Ev by a full-width immediate (sign-extended dword in 64-bit
 * mode) and stores the result in Gv.  The multiply is performed on a local
 * copy which is then written to the destination register. */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Iz)
{
    IEMOP_MNEMONIC("imul Gv,Ev,Iz"); /* Gv = Ev * Iz; */
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* SF, ZF, AF and PF are undefined after IMUL. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *,      pu16Dst,            0);
                IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/ u16Imm,1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_LOCAL(uint16_t,      u16Tmp);

                IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand; note the immediate follows the ModRM bytes,
                   so it is fetched after calculating the effective address. */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,      pu16Dst,            0);
                IEM_MC_ARG(uint16_t,        u16Src,             1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_LOCAL(uint16_t,      u16Tmp);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *,      pu32Dst,            0);
                IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/ u32Imm,1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_LOCAL(uint32_t,      u32Tmp);

                IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,      pu32Dst,            0);
                IEM_MC_ARG(uint32_t,        u32Src,             1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_LOCAL(uint32_t,      u32Tmp);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand; the immediate is a sign-extended dword. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *,      pu64Dst,            0);
                IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/ u64Imm,1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_LOCAL(uint64_t,      u64Tmp);

                IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand; the immediate is a sign-extended dword. */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,      pu64Dst,            0);
                IEM_MC_ARG(uint64_t,        u64Src,             1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_LOCAL(uint64_t,      u64Tmp);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }
    }
    AssertFailedReturn(VERR_IEM_IPE_9); /* all operand sizes returned above */
}
8713
8714
/** Opcode 0x6a - PUSH Ib (80186+).
 * Pushes a byte immediate sign-extended to the effective operand size
 * (i8Imm is signed, so the IEM_MC_PUSH_UXX conversions sign-extend). */
FNIEMOP_DEF(iemOp_push_Ib)
{
    IEMOP_MNEMONIC("push Ib");
    IEMOP_HLP_MIN_186();
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0,0);
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_PUSH_U16(i8Imm);
            break;
        case IEMMODE_32BIT:
            IEM_MC_PUSH_U32(i8Imm);
            break;
        case IEMMODE_64BIT:
            IEM_MC_PUSH_U64(i8Imm);
            break;
    }
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8741
8742
/** Opcode 0x6b - IMUL Gv,Ev,Ib (three-operand form, 80186+).
 * Like opcode 0x69 but with a byte immediate that is sign-extended to the
 * effective operand size before the multiply. */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Ib)
{
    IEMOP_MNEMONIC("imul Gv,Ev,Ib"); /* Gv = Ev * Iz; */
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* SF, ZF, AF and PF are undefined after IMUL. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand; byte immediate sign-extended to 16 bits. */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                    0);
                IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);
                IEM_MC_LOCAL(uint16_t,      u16Tmp);

                IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand; the byte immediate follows the ModRM bytes. */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                    0);
                IEM_MC_ARG(uint16_t,        u16Src,                     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);
                IEM_MC_LOCAL(uint16_t,      u16Tmp);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_S8_SX_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand; byte immediate sign-extended to 32 bits. */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                    0);
                IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);
                IEM_MC_LOCAL(uint32_t,      u32Tmp);

                IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand; the byte immediate follows the ModRM bytes. */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                    0);
                IEM_MC_ARG(uint32_t,        u32Src,                     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);
                IEM_MC_LOCAL(uint32_t,      u32Tmp);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_S8_SX_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand; byte immediate sign-extended to 64 bits. */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                    0);
                IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);
                IEM_MC_LOCAL(uint64_t,      u64Tmp);

                IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand; the byte immediate follows the ModRM bytes. */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                    0);
                IEM_MC_ARG(uint64_t,        u64Src,                     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);
                IEM_MC_LOCAL(uint64_t,      u64Tmp);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S8_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
    }
    AssertFailedReturn(VERR_IEM_IPE_8); /* all operand sizes returned above */
}
8896
8897
/** Opcode 0x6c - INSB Yb,DX (80186+).
 * Deferred to C implementations selected by REP prefix and effective address
 * size.  The 'false' argument presumably means the I/O permission check has
 * not been done yet — TODO confirm against the CImpl worker signatures. */
FNIEMOP_DEF(iemOp_insb_Yb_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep ins Yb,DX");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr16, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr32, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC("ins Yb,DX");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr16, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr32, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8926
8927
/** Opcode 0x6d - INSW/INSD Yv,DX (80186+).
 * Deferred to C implementations selected by REP prefix, operand size and
 * address size.  A 64-bit operand size is handled as 32-bit (case
 * fall-through): there is no 64-bit I/O port operand. */
FNIEMOP_DEF(iemOp_inswd_Yv_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC("rep ins Yv,DX");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* fall thru - treated as 32-bit */
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC("ins Yv,DX");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* fall thru - treated as 32-bit */
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8988
8989
/** Opcode 0x6e - OUTSB DX,Yb (80186+).
 * Deferred to C implementations selected by REP prefix and effective address
 * size; the effective segment is passed along since OUTS reads from memory.
 * The 'false' argument presumably means the I/O permission check has not
 * been done yet — TODO confirm against the CImpl worker signatures. */
FNIEMOP_DEF(iemOp_outsb_Yb_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep outs DX,Yb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr16, pIemCpu->iEffSeg, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr32, pIemCpu->iEffSeg, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr64, pIemCpu->iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC("outs DX,Yb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr16, pIemCpu->iEffSeg, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr32, pIemCpu->iEffSeg, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr64, pIemCpu->iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9018
9019
/**
 * Opcode 0x6f - outsw/outsd Yv,DX.
 *
 * Word/dword string output to port DX.  Decoding only: selects the C worker
 * by effective operand size, effective address size and REP prefix, then
 * defers all the real work to it.  There is no 64-bit port I/O, so the
 * 64-bit operand size case shares the 32-bit workers.
 */
FNIEMOP_DEF(iemOp_outswd_Yv_DX)
{
    IEMOP_HLP_MIN_186();        /* OUTS was introduced with the 80186. */
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        /* Both REP and REPNE are accepted and treated alike for string I/O. */
        IEMOP_MNEMONIC("rep outs DX,Yv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr16, pIemCpu->iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr32, pIemCpu->iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr64, pIemCpu->iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT:     /* 64-bit operand size is handled as 32-bit (max port width). */
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr16, pIemCpu->iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr32, pIemCpu->iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr64, pIemCpu->iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Unprefixed: single-iteration variants. */
        IEMOP_MNEMONIC("outs DX,Yv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr16, pIemCpu->iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr32, pIemCpu->iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr64, pIemCpu->iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT:     /* 64-bit operand size is handled as 32-bit (max port width). */
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr16, pIemCpu->iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr32, pIemCpu->iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr64, pIemCpu->iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9080
9081
/**
 * Opcode 0x70 - jo Jb.
 *
 * Jump short if overflow: taken when OF=1, otherwise falls through.
 */
FNIEMOP_DEF(iemOp_jo_Jb)
{
    IEMOP_MNEMONIC("jo Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);  /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* near branches default to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9099
9100
/**
 * Opcode 0x71 - jno Jb.
 *
 * Jump short if not overflow: taken when OF=0 (the branch sits in the
 * ELSE arm of the OF test).
 */
FNIEMOP_DEF(iemOp_jno_Jb)
{
    IEMOP_MNEMONIC("jno Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);  /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* near branches default to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9118
/**
 * Opcode 0x72 - jc/jb/jnae Jb.
 *
 * Jump short if carry/below: taken when CF=1.
 */
FNIEMOP_DEF(iemOp_jc_Jb)
{
    IEMOP_MNEMONIC("jc/jnae Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);  /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* near branches default to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9136
9137
/**
 * Opcode 0x73 - jnc/jnb/jae Jb.
 *
 * Jump short if not carry/above-or-equal: taken when CF=0 (the branch
 * sits in the ELSE arm of the CF test).
 */
FNIEMOP_DEF(iemOp_jnc_Jb)
{
    IEMOP_MNEMONIC("jnc/jnb Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);  /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* near branches default to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9155
9156
/**
 * Opcode 0x74 - je/jz Jb.
 *
 * Jump short if equal/zero: taken when ZF=1.
 */
FNIEMOP_DEF(iemOp_je_Jb)
{
    IEMOP_MNEMONIC("je/jz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);  /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* near branches default to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9174
9175
/**
 * Opcode 0x75 - jne/jnz Jb.
 *
 * Jump short if not equal/not zero: taken when ZF=0 (the branch sits in
 * the ELSE arm of the ZF test).
 */
FNIEMOP_DEF(iemOp_jne_Jb)
{
    IEMOP_MNEMONIC("jne/jnz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);  /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* near branches default to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9193
9194
/**
 * Opcode 0x76 - jbe/jna Jb.
 *
 * Jump short if below-or-equal/not-above: taken when CF=1 or ZF=1.
 */
FNIEMOP_DEF(iemOp_jbe_Jb)
{
    IEMOP_MNEMONIC("jbe/jna Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);  /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* near branches default to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9212
9213
/**
 * Opcode 0x77 - jnbe/ja Jb.
 *
 * Jump short if above: taken when CF=0 and ZF=0 (the branch sits in the
 * ELSE arm of the CF|ZF test).
 */
FNIEMOP_DEF(iemOp_jnbe_Jb)
{
    IEMOP_MNEMONIC("jnbe/ja Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);  /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* near branches default to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9231
9232
/**
 * Opcode 0x78 - js Jb.
 *
 * Jump short if sign: taken when SF=1.
 */
FNIEMOP_DEF(iemOp_js_Jb)
{
    IEMOP_MNEMONIC("js Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);  /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* near branches default to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9250
9251
/**
 * Opcode 0x79 - jns Jb.
 *
 * Jump short if not sign: taken when SF=0 (the branch sits in the ELSE
 * arm of the SF test).
 */
FNIEMOP_DEF(iemOp_jns_Jb)
{
    IEMOP_MNEMONIC("jns Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);  /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* near branches default to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9269
9270
/**
 * Opcode 0x7a - jp/jpe Jb.
 *
 * Jump short if parity (even): taken when PF=1.
 */
FNIEMOP_DEF(iemOp_jp_Jb)
{
    IEMOP_MNEMONIC("jp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);  /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* near branches default to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9288
9289
/**
 * Opcode 0x7b - jnp/jpo Jb.
 *
 * Jump short if not parity (odd): taken when PF=0 (the branch sits in
 * the ELSE arm of the PF test).
 */
FNIEMOP_DEF(iemOp_jnp_Jb)
{
    IEMOP_MNEMONIC("jnp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);  /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* near branches default to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9307
9308
/**
 * Opcode 0x7c - jl/jnge Jb.
 *
 * Jump short if less (signed): taken when SF != OF.
 */
FNIEMOP_DEF(iemOp_jl_Jb)
{
    IEMOP_MNEMONIC("jl/jnge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);  /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* near branches default to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9326
9327
/**
 * Opcode 0x7d - jnl/jge Jb.
 *
 * Jump short if greater-or-equal (signed): taken when SF == OF (the
 * branch sits in the ELSE arm of the SF != OF test).
 */
FNIEMOP_DEF(iemOp_jnl_Jb)
{
    IEMOP_MNEMONIC("jnl/jge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);  /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* near branches default to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9345
9346
/**
 * Opcode 0x7e - jle/jng Jb.
 *
 * Jump short if less-or-equal (signed): taken when ZF=1 or SF != OF.
 */
FNIEMOP_DEF(iemOp_jle_Jb)
{
    IEMOP_MNEMONIC("jle/jng Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);  /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* near branches default to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9364
9365
/**
 * Opcode 0x7f - jnle/jg Jb.
 *
 * Jump short if greater (signed): taken when ZF=0 and SF == OF (the
 * branch sits in the ELSE arm of the combined test).
 */
FNIEMOP_DEF(iemOp_jnle_Jb)
{
    IEMOP_MNEMONIC("jnle/jg Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);  /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* near branches default to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9383
9384
/**
 * Opcode 0x80 - Group 1: add/or/adc/sbb/and/sub/xor/cmp Eb,Ib.
 *
 * The ModR/M reg field selects one of the eight byte ALU operations via
 * the g_apIemImplGrp1 worker table.  Handles both register and memory
 * destinations, including LOCK-prefixed memory forms (except CMP, which
 * has no locked worker and therefore rejects LOCK).
 */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_80)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Index into the packed mnemonic string; each entry is 4 bytes ("add\0" etc). */
    IEMOP_MNEMONIC2("add\0or\0\0adc\0sbb\0and\0sub\0xor\0cmp" + ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)*4, "Eb,Ib");
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();     /* LOCK is only valid with a memory destination. */
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,       pu8Dst,                 0);
        IEM_MC_ARG_CONST(uint8_t,   u8Src, /*=*/ u8Imm,     1);
        IEM_MC_ARG(uint32_t *,      pEFlags,                2);

        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        uint32_t fAccess;
        if (pImpl->pfnLockedU8)
            fAccess = IEM_ACCESS_DATA_RW;
        else
        {   /* CMP - read-only destination, no locked form. */
            IEMOP_HLP_NO_LOCK_PREFIX();
            fAccess = IEM_ACCESS_DATA_R;
        }
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,       pu8Dst,                 0);
        IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        /* cbImm=1: one immediate byte still follows the ModR/M operand. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_ARG_CONST(uint8_t,   u8Src, /*=*/ u8Imm,     1);

        IEM_MC_MEM_MAP(pu8Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
9443
9444
9445/** Opcode 0x81. */
9446FNIEMOP_DEF(iemOp_Grp1_Ev_Iz)
9447{
9448 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9449 IEMOP_MNEMONIC2("add\0or\0\0adc\0sbb\0and\0sub\0xor\0cmp" + ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)*4, "Ev,Iz");
9450 PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];
9451
9452 switch (pIemCpu->enmEffOpSize)
9453 {
9454 case IEMMODE_16BIT:
9455 {
9456 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9457 {
9458 /* register target */
9459 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
9460 IEMOP_HLP_NO_LOCK_PREFIX();
9461 IEM_MC_BEGIN(3, 0);
9462 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9463 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u16Imm, 1);
9464 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9465
9466 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
9467 IEM_MC_REF_EFLAGS(pEFlags);
9468 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
9469
9470 IEM_MC_ADVANCE_RIP();
9471 IEM_MC_END();
9472 }
9473 else
9474 {
9475 /* memory target */
9476 uint32_t fAccess;
9477 if (pImpl->pfnLockedU16)
9478 fAccess = IEM_ACCESS_DATA_RW;
9479 else
9480 { /* CMP, TEST */
9481 IEMOP_HLP_NO_LOCK_PREFIX();
9482 fAccess = IEM_ACCESS_DATA_R;
9483 }
9484 IEM_MC_BEGIN(3, 2);
9485 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9486 IEM_MC_ARG(uint16_t, u16Src, 1);
9487 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
9488 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9489
9490 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
9491 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
9492 IEM_MC_ASSIGN(u16Src, u16Imm);
9493 IEM_MC_MEM_MAP(pu16Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
9494 IEM_MC_FETCH_EFLAGS(EFlags);
9495 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
9496 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
9497 else
9498 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
9499
9500 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
9501 IEM_MC_COMMIT_EFLAGS(EFlags);
9502 IEM_MC_ADVANCE_RIP();
9503 IEM_MC_END();
9504 }
9505 break;
9506 }
9507
9508 case IEMMODE_32BIT:
9509 {
9510 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9511 {
9512 /* register target */
9513 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
9514 IEMOP_HLP_NO_LOCK_PREFIX();
9515 IEM_MC_BEGIN(3, 0);
9516 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9517 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u32Imm, 1);
9518 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9519
9520 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
9521 IEM_MC_REF_EFLAGS(pEFlags);
9522 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
9523 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
9524
9525 IEM_MC_ADVANCE_RIP();
9526 IEM_MC_END();
9527 }
9528 else
9529 {
9530 /* memory target */
9531 uint32_t fAccess;
9532 if (pImpl->pfnLockedU32)
9533 fAccess = IEM_ACCESS_DATA_RW;
9534 else
9535 { /* CMP, TEST */
9536 IEMOP_HLP_NO_LOCK_PREFIX();
9537 fAccess = IEM_ACCESS_DATA_R;
9538 }
9539 IEM_MC_BEGIN(3, 2);
9540 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9541 IEM_MC_ARG(uint32_t, u32Src, 1);
9542 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
9543 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9544
9545 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
9546 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
9547 IEM_MC_ASSIGN(u32Src, u32Imm);
9548 IEM_MC_MEM_MAP(pu32Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
9549 IEM_MC_FETCH_EFLAGS(EFlags);
9550 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
9551 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
9552 else
9553 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
9554
9555 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
9556 IEM_MC_COMMIT_EFLAGS(EFlags);
9557 IEM_MC_ADVANCE_RIP();
9558 IEM_MC_END();
9559 }
9560 break;
9561 }
9562
9563 case IEMMODE_64BIT:
9564 {
9565 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9566 {
9567 /* register target */
9568 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
9569 IEMOP_HLP_NO_LOCK_PREFIX();
9570 IEM_MC_BEGIN(3, 0);
9571 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9572 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u64Imm, 1);
9573 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9574
9575 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
9576 IEM_MC_REF_EFLAGS(pEFlags);
9577 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
9578
9579 IEM_MC_ADVANCE_RIP();
9580 IEM_MC_END();
9581 }
9582 else
9583 {
9584 /* memory target */
9585 uint32_t fAccess;
9586 if (pImpl->pfnLockedU64)
9587 fAccess = IEM_ACCESS_DATA_RW;
9588 else
9589 { /* CMP */
9590 IEMOP_HLP_NO_LOCK_PREFIX();
9591 fAccess = IEM_ACCESS_DATA_R;
9592 }
9593 IEM_MC_BEGIN(3, 2);
9594 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9595 IEM_MC_ARG(uint64_t, u64Src, 1);
9596 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
9597 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9598
9599 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
9600 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
9601 IEM_MC_ASSIGN(u64Src, u64Imm);
9602 IEM_MC_MEM_MAP(pu64Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
9603 IEM_MC_FETCH_EFLAGS(EFlags);
9604 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
9605 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
9606 else
9607 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
9608
9609 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
9610 IEM_MC_COMMIT_EFLAGS(EFlags);
9611 IEM_MC_ADVANCE_RIP();
9612 IEM_MC_END();
9613 }
9614 break;
9615 }
9616 }
9617 return VINF_SUCCESS;
9618}
9619
9620
/**
 * Opcode 0x82 - Group 1 Eb,Ib alias (invalid in 64-bit mode).
 *
 * Behaves exactly like opcode 0x80 outside long mode; simply raises the
 * 64-bit-mode restriction and forwards to the 0x80 decoder.
 */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_82)
{
    IEMOP_HLP_NO_64BIT(); /** @todo do we need to decode the whole instruction or is this ok? */
    return FNIEMOP_CALL(iemOp_Grp1_Eb_Ib_80);
}
9627
9628
9629/** Opcode 0x83. */
9630FNIEMOP_DEF(iemOp_Grp1_Ev_Ib)
9631{
9632 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9633 IEMOP_MNEMONIC2("add\0or\0\0adc\0sbb\0and\0sub\0xor\0cmp" + ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)*4, "Ev,Ib");
9634 /* Note! Seems the OR, AND, and XOR instructions are present on CPUs prior
9635 to the 386 even if absent in the intel reference manuals and some
9636 3rd party opcode listings. */
9637 PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];
9638
9639 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9640 {
9641 /*
9642 * Register target
9643 */
9644 IEMOP_HLP_NO_LOCK_PREFIX();
9645 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
9646 switch (pIemCpu->enmEffOpSize)
9647 {
9648 case IEMMODE_16BIT:
9649 {
9650 IEM_MC_BEGIN(3, 0);
9651 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9652 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (int8_t)u8Imm,1);
9653 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9654
9655 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
9656 IEM_MC_REF_EFLAGS(pEFlags);
9657 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
9658
9659 IEM_MC_ADVANCE_RIP();
9660 IEM_MC_END();
9661 break;
9662 }
9663
9664 case IEMMODE_32BIT:
9665 {
9666 IEM_MC_BEGIN(3, 0);
9667 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9668 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (int8_t)u8Imm,1);
9669 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9670
9671 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
9672 IEM_MC_REF_EFLAGS(pEFlags);
9673 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
9674 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
9675
9676 IEM_MC_ADVANCE_RIP();
9677 IEM_MC_END();
9678 break;
9679 }
9680
9681 case IEMMODE_64BIT:
9682 {
9683 IEM_MC_BEGIN(3, 0);
9684 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9685 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int8_t)u8Imm,1);
9686 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9687
9688 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
9689 IEM_MC_REF_EFLAGS(pEFlags);
9690 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
9691
9692 IEM_MC_ADVANCE_RIP();
9693 IEM_MC_END();
9694 break;
9695 }
9696 }
9697 }
9698 else
9699 {
9700 /*
9701 * Memory target.
9702 */
9703 uint32_t fAccess;
9704 if (pImpl->pfnLockedU16)
9705 fAccess = IEM_ACCESS_DATA_RW;
9706 else
9707 { /* CMP */
9708 IEMOP_HLP_NO_LOCK_PREFIX();
9709 fAccess = IEM_ACCESS_DATA_R;
9710 }
9711
9712 switch (pIemCpu->enmEffOpSize)
9713 {
9714 case IEMMODE_16BIT:
9715 {
9716 IEM_MC_BEGIN(3, 2);
9717 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9718 IEM_MC_ARG(uint16_t, u16Src, 1);
9719 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
9720 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9721
9722 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
9723 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
9724 IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm);
9725 IEM_MC_MEM_MAP(pu16Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
9726 IEM_MC_FETCH_EFLAGS(EFlags);
9727 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
9728 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
9729 else
9730 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
9731
9732 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
9733 IEM_MC_COMMIT_EFLAGS(EFlags);
9734 IEM_MC_ADVANCE_RIP();
9735 IEM_MC_END();
9736 break;
9737 }
9738
9739 case IEMMODE_32BIT:
9740 {
9741 IEM_MC_BEGIN(3, 2);
9742 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9743 IEM_MC_ARG(uint32_t, u32Src, 1);
9744 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
9745 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9746
9747 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
9748 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
9749 IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm);
9750 IEM_MC_MEM_MAP(pu32Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
9751 IEM_MC_FETCH_EFLAGS(EFlags);
9752 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
9753 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
9754 else
9755 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
9756
9757 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
9758 IEM_MC_COMMIT_EFLAGS(EFlags);
9759 IEM_MC_ADVANCE_RIP();
9760 IEM_MC_END();
9761 break;
9762 }
9763
9764 case IEMMODE_64BIT:
9765 {
9766 IEM_MC_BEGIN(3, 2);
9767 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9768 IEM_MC_ARG(uint64_t, u64Src, 1);
9769 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
9770 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9771
9772 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
9773 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
9774 IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm);
9775 IEM_MC_MEM_MAP(pu64Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
9776 IEM_MC_FETCH_EFLAGS(EFlags);
9777 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
9778 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
9779 else
9780 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
9781
9782 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
9783 IEM_MC_COMMIT_EFLAGS(EFlags);
9784 IEM_MC_ADVANCE_RIP();
9785 IEM_MC_END();
9786 break;
9787 }
9788 }
9789 }
9790 return VINF_SUCCESS;
9791}
9792
9793
/**
 * Opcode 0x84 - test Eb,Gb.
 *
 * Reuses the generic byte rm,r8 binary-operator decoder with the TEST
 * worker (read-only destination, AF left undefined per the spec).
 */
FNIEMOP_DEF(iemOp_test_Eb_Gb)
{
    IEMOP_MNEMONIC("test Eb,Gb");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo do we have to decode the whole instruction first? */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_test);
}
9802
9803
/**
 * Opcode 0x85 - test Ev,Gv.
 *
 * Reuses the generic rm,rv binary-operator decoder with the TEST worker
 * (read-only destination, AF left undefined per the spec).
 */
FNIEMOP_DEF(iemOp_test_Ev_Gv)
{
    IEMOP_MNEMONIC("test Ev,Gv");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo do we have to decode the whole instruction first? */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_test);
}
9812
9813
/**
 * Opcode 0x86 - xchg Eb,Gb.
 *
 * Byte exchange.  The register/register form swaps via two locals; the
 * memory form maps the byte read-write and calls the assembly xchg
 * worker (memory XCHG is implicitly locked on real hardware).
 */
FNIEMOP_DEF(iemOp_xchg_Eb_Gb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC("xchg Eb,Gb");

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, uTmp1);
        IEM_MC_LOCAL(uint8_t, uTmp2);

        /* Fetch both, then store each into the other's register. */
        IEM_MC_FETCH_GREG_U8(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_FETCH_GREG_U8(uTmp2, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, uTmp1);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uTmp2);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
/** @todo the register must be committed separately! */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint8_t *,  pu8Mem,           0);
        IEM_MC_ARG(uint8_t *,  pu8Reg,           1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Mem, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8, pu8Mem, pu8Reg);
        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Mem, IEM_ACCESS_DATA_RW);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
9861
9862
/**
 * Opcode 0x87 - xchg Ev,Gv.
 *
 * Word/dword/qword exchange.  Register/register forms swap via two
 * locals; memory forms map the operand read-write and call the assembly
 * xchg worker (memory XCHG is implicitly locked on real hardware).
 * The 32-bit forms zero bits 63:32 of the destination registers.
 */
FNIEMOP_DEF(iemOp_xchg_Ev_Gv)
{
    IEMOP_MNEMONIC("xchg Ev,Gv");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX();

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, uTmp1);
                IEM_MC_LOCAL(uint16_t, uTmp2);

                /* Fetch both, then store each into the other's register. */
                IEM_MC_FETCH_GREG_U16(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U16(uTmp2, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, uTmp1);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, uTmp1);
                IEM_MC_LOCAL(uint32_t, uTmp2);

                IEM_MC_FETCH_GREG_U32(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U32(uTmp2, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, uTmp1);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, uTmp1);
                IEM_MC_LOCAL(uint64_t, uTmp2);

                IEM_MC_FETCH_GREG_U64(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U64(uTmp2, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, uTmp1);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
        switch (pIemCpu->enmEffOpSize)
        {
/** @todo the register must be committed separately! */
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint16_t *,  pu16Mem, 0);
                IEM_MC_ARG(uint16_t *,  pu16Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu16Mem, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16, pu16Mem, pu16Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Mem, IEM_ACCESS_DATA_RW);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint32_t *,  pu32Mem, 0);
                IEM_MC_ARG(uint32_t *,  pu32Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu32Mem, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32, pu32Mem, pu32Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Mem, IEM_ACCESS_DATA_RW);

                /* 32-bit register write zeroes bits 63:32. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint64_t *,  pu64Mem, 0);
                IEM_MC_ARG(uint64_t *,  pu64Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu64Mem, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64, pu64Mem, pu64Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Mem, IEM_ACCESS_DATA_RW);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9984
9985
/**
 * Opcode 0x88 - mov Eb,Gb.
 *
 * Store the byte register named by ModR/M.reg into the register or
 * memory location named by ModR/M.rm.
 */
FNIEMOP_DEF(iemOp_mov_Eb_Gb)
{
    IEMOP_MNEMONIC("mov Eb,Gb");

    uint8_t bRm;
    IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);   /* cbImm=0: no immediate bytes follow. */
        IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_STORE_MEM_U8(pIemCpu->iEffSeg, GCPtrEffDst, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;

}
10024
10025
/**
 * Opcode 0x89 - mov Ev,Gv.
 *
 * Store the word/dword/qword register named by ModR/M.reg into the
 * register or memory location named by ModR/M.rm.
 *
 * NOTE(review): the enmEffOpSize switches here have no default case,
 * unlike e.g. iemOp_xchg_Ev_Gv - harmless since all three modes are
 * covered, but worth aligning for -Wswitch friendliness.
 */
FNIEMOP_DEF(iemOp_mov_Ev_Gv)
{
    IEMOP_MNEMONIC("mov Ev,Gv");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);   /* cbImm=0: no immediate bytes follow. */
                IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrEffDst, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffDst, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
10112
10113
/**
 * Opcode 0x8a - MOV Gb,Eb.
 *
 * Loads a byte register (ModR/M.reg, REX.R) from the r/m operand.
 */
FNIEMOP_DEF(iemOp_mov_Gb_Eb)
{
    IEMOP_MNEMONIC("mov Gb,Eb");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        /* NOTE(review): the local is named GCPtrEffDst but here it is the
           load *source* address - naming kept for consistency with siblings. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_MEM_U8(u8Value, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
10150
10151
/**
 * Opcode 0x8b - MOV Gv,Ev.
 *
 * Loads a 16/32/64-bit general purpose register (ModR/M.reg, REX.R) from the
 * r/m operand, which may be a register or a memory location.
 */
FNIEMOP_DEF(iemOp_mov_Gv_Ev)
{
    IEMOP_MNEMONIC("mov Gv,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register source: one MC block per effective operand size. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
10238
10239
10240/** Opcode 0x63. */
10241FNIEMOP_DEF(iemOp_arpl_Ew_Gw_movsx_Gv_Ev)
10242{
10243 if (pIemCpu->enmCpuMode != IEMMODE_64BIT)
10244 return FNIEMOP_CALL(iemOp_arpl_Ew_Gw);
10245 if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
10246 return FNIEMOP_CALL(iemOp_mov_Gv_Ev);
10247 return FNIEMOP_CALL(iemOp_movsxd_Gv_Ev);
10248}
10249
10250
/**
 * Opcode 0x8c - MOV Ev,Sw.
 *
 * Stores a segment register into the r/m operand.  Register destinations
 * honour the operand size (zero-extending); memory destinations are always
 * word sized.
 */
FNIEMOP_DEF(iemOp_mov_Ev_Sw)
{
    IEMOP_MNEMONIC("mov Ev,Sw");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * Check that the destination register exists. The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
    if (   iSegReg > X86_SREG_GS)
        return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     * In that case, the operand size is respected and the upper bits are
     * cleared (starting with some pentium).
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_SREG_ZX_U32(u32Value, iSegReg);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_SREG_ZX_U64(u64Value, iSegReg);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're saving the register to memory.  The access is word sized
         * regardless of operand size prefixes.
         */
#if 0 /* not necessary */
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_16BIT;
#endif
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
        IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
10323
10324
10325
10326
/**
 * Opcode 0x8d - LEA Gv,M.
 *
 * Stores the calculated effective address (not the memory content) into the
 * destination register, truncated to the effective operand size.  A register
 * form (mod==3) is invalid and raises \#UD.
 */
FNIEMOP_DEF(iemOp_lea_Gv_M)
{
    IEMOP_MNEMONIC("lea Gv,M");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE(); /* no register form */

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint16_t, u16Cast);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEM_MC_ASSIGN_TO_SMALLER(u16Cast, GCPtrEffSrc);
            IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Cast);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Cast);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEM_MC_ASSIGN_TO_SMALLER(u32Cast, GCPtrEffSrc);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Cast);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            /* No truncation needed for the full 64-bit address. */
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, GCPtrEffSrc);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
    }
    AssertFailedReturn(VERR_IEM_IPE_7);
}
10371
10372
/**
 * Opcode 0x8e - MOV Sw,Ev.
 *
 * Loads a segment register from the r/m operand (always a word access).
 * CS as destination, or a reg field beyond GS, raises \#UD.  The actual
 * load is deferred to iemCImpl_load_SReg which performs the descriptor
 * checks.
 */
FNIEMOP_DEF(iemOp_mov_Sw_Ev)
{
    IEMOP_MNEMONIC("mov Sw,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * The practical operand size is 16-bit.
     */
#if 0 /* not necessary */
    pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_16BIT;
#endif

    /*
     * Check that the destination register exists and can be used with this
     * instruction.  The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
    if (   iSegReg == X86_SREG_CS
        || iSegReg > X86_SREG_GS)
        return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
        IEM_MC_ARG(uint16_t, u16Value, 1);
        IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading the register from memory.  The access is word sized
         * regardless of operand size prefixes.
         */
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
        IEM_MC_ARG(uint16_t, u16Value, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_MEM_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
10426
10427
/**
 * Opcode 0x8f /0 - POP Ev.
 *
 * Register destinations share the common POP code.  Memory destinations are
 * handled interpreter-style below because Intel specifies that rSP is
 * incremented *before* it is used in the effective address calculation.
 */
FNIEMOP_DEF_1(iemOp_pop_Ev, uint8_t, bRm)
{
    /* This bugger is rather annoying as it requires rSP to be updated before
       doing the effective address calculations.  Will eventually require a
       split between the R/M+SIB decoding and the effective address
       calculation - which is something that is required for any attempt at
       reusing this code for a recompiler.  It may also be good to have if we
       need to delay #UD exception caused by invalid lock prefixes.

       For now, we'll do a mostly safe interpreter-only implementation here. */
    /** @todo What's the deal with the 'reg' field and pop Ev?  Ignorning it for
     *        now until tests show it's checked.. */
    IEMOP_MNEMONIC("pop Ev");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /* Register access is relatively easy and can share code. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(iemOpCommonPopGReg, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);

    /*
     * Memory target.
     *
     * Intel says that RSP is incremented before it's used in any effective
     * address calcuations.  This means some serious extra annoyance here since
     * we decode and calculate the effective address in one step and like to
     * delay committing registers till everything is done.
     *
     * So, we'll decode and calculate the effective address twice.  This will
     * require some recoding if turned into a recompiler.
     */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* The common code does this differently. */

#ifndef TST_IEM_CHECK_MC
    /* Calc effective address with modified ESP. */
    /* First pass just consumes the opcode bytes; offOpcode is restored so the
       second pass re-reads the same displacement/SIB bytes. */
    uint8_t const offOpcodeSaved = pIemCpu->offOpcode;
    RTGCPTR GCPtrEff;
    VBOXSTRICTRC rcStrict;
    rcStrict = iemOpHlpCalcRmEffAddr(pIemCpu, bRm, 0, &GCPtrEff);
    if (rcStrict != VINF_SUCCESS)
        return rcStrict;
    pIemCpu->offOpcode = offOpcodeSaved;

    /* Second pass with rSP temporarily advanced past the popped item, then
       restored so the commit below starts from the original value. */
    PCPUMCTX pCtx = pIemCpu->CTX_SUFF(pCtx);
    uint64_t const RspSaved = pCtx->rsp;
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT: iemRegAddToRsp(pIemCpu, pCtx, 2); break;
        case IEMMODE_32BIT: iemRegAddToRsp(pIemCpu, pCtx, 4); break;
        case IEMMODE_64BIT: iemRegAddToRsp(pIemCpu, pCtx, 8); break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    rcStrict = iemOpHlpCalcRmEffAddr(pIemCpu, bRm, 0, &GCPtrEff);
    Assert(rcStrict == VINF_SUCCESS);
    pCtx->rsp = RspSaved;

    /* Perform the operation - this should be CImpl. */
    RTUINT64U TmpRsp;
    TmpRsp.u = pCtx->rsp;
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Value;
            rcStrict = iemMemStackPopU16Ex(pIemCpu, &u16Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU16(pIemCpu, pIemCpu->iEffSeg, GCPtrEff, u16Value);
            break;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Value;
            rcStrict = iemMemStackPopU32Ex(pIemCpu, &u32Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU32(pIemCpu, pIemCpu->iEffSeg, GCPtrEff, u32Value);
            break;
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Value;
            rcStrict = iemMemStackPopU64Ex(pIemCpu, &u64Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU64(pIemCpu, pIemCpu->iEffSeg, GCPtrEff, u64Value);
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    /* Commit rSP and advance RIP only when both the pop and the store worked. */
    if (rcStrict == VINF_SUCCESS)
    {
        pCtx->rsp = TmpRsp.u;
        iemRegUpdateRipAndClearRF(pIemCpu);
    }
    return rcStrict;

#else
    return VERR_IEM_IPE_2;
#endif
}
10529
10530
10531/** Opcode 0x8f. */
10532FNIEMOP_DEF(iemOp_Grp1A)
10533{
10534 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10535 if ((bRm & X86_MODRM_REG_MASK) == (0 << X86_MODRM_REG_SHIFT)) /* /0 */
10536 return FNIEMOP_CALL_1(iemOp_pop_Ev, bRm);
10537
10538 /* AMD has defined /1 thru /7 as XOP prefix (similar to three byte VEX). */
10539 /** @todo XOP decoding. */
10540 IEMOP_MNEMONIC("3-byte-xop");
10541 return IEMOP_RAISE_INVALID_OPCODE();
10542}
10543
10544
/**
 * Common 'xchg reg,rAX' helper.
 *
 * Exchanges the general purpose register @a iReg (extended with REX.B) with
 * rAX at the current effective operand size.  Used by opcodes 0x90-0x97.
 */
FNIEMOP_DEF_1(iemOpCommonXchgGRegRax, uint8_t, iReg)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    iReg |= pIemCpu->uRexB;
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint16_t, u16Tmp1);
            IEM_MC_LOCAL(uint16_t, u16Tmp2);
            IEM_MC_FETCH_GREG_U16(u16Tmp1, iReg);
            IEM_MC_FETCH_GREG_U16(u16Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp1);
            IEM_MC_STORE_GREG_U16(iReg, u16Tmp2);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Tmp1);
            IEM_MC_LOCAL(uint32_t, u32Tmp2);
            IEM_MC_FETCH_GREG_U32(u32Tmp1, iReg);
            IEM_MC_FETCH_GREG_U32(u32Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp1);
            IEM_MC_STORE_GREG_U32(iReg, u32Tmp2);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Tmp1);
            IEM_MC_LOCAL(uint64_t, u64Tmp2);
            IEM_MC_FETCH_GREG_U64(u64Tmp1, iReg);
            IEM_MC_FETCH_GREG_U64(u64Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp1);
            IEM_MC_STORE_GREG_U64(iReg, u64Tmp2);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10594
10595
10596/** Opcode 0x90. */
10597FNIEMOP_DEF(iemOp_nop)
10598{
10599 /* R8/R8D and RAX/EAX can be exchanged. */
10600 if (pIemCpu->fPrefixes & IEM_OP_PRF_REX_B)
10601 {
10602 IEMOP_MNEMONIC("xchg r8,rAX");
10603 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xAX);
10604 }
10605
10606 if (pIemCpu->fPrefixes & IEM_OP_PRF_LOCK)
10607 IEMOP_MNEMONIC("pause");
10608 else
10609 IEMOP_MNEMONIC("nop");
10610 IEM_MC_BEGIN(0, 0);
10611 IEM_MC_ADVANCE_RIP();
10612 IEM_MC_END();
10613 return VINF_SUCCESS;
10614}
10615
10616
/** Opcode 0x91 - XCHG rCX,rAX (r9 with REX.B). */
FNIEMOP_DEF(iemOp_xchg_eCX_eAX)
{
    IEMOP_MNEMONIC("xchg rCX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xCX);
}
10623
10624
/** Opcode 0x92 - XCHG rDX,rAX (r10 with REX.B). */
FNIEMOP_DEF(iemOp_xchg_eDX_eAX)
{
    IEMOP_MNEMONIC("xchg rDX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDX);
}
10631
10632
/** Opcode 0x93 - XCHG rBX,rAX (r11 with REX.B). */
FNIEMOP_DEF(iemOp_xchg_eBX_eAX)
{
    IEMOP_MNEMONIC("xchg rBX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBX);
}
10639
10640
10641/** Opcode 0x94. */
10642FNIEMOP_DEF(iemOp_xchg_eSP_eAX)
10643{
10644 IEMOP_MNEMONIC("xchg rSX,rAX");
10645 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSP);
10646}
10647
10648
/** Opcode 0x95 - XCHG rBP,rAX (r13 with REX.B). */
FNIEMOP_DEF(iemOp_xchg_eBP_eAX)
{
    IEMOP_MNEMONIC("xchg rBP,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBP);
}
10655
10656
/** Opcode 0x96 - XCHG rSI,rAX (r14 with REX.B). */
FNIEMOP_DEF(iemOp_xchg_eSI_eAX)
{
    IEMOP_MNEMONIC("xchg rSI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSI);
}
10663
10664
/** Opcode 0x97 - XCHG rDI,rAX (r15 with REX.B). */
FNIEMOP_DEF(iemOp_xchg_eDI_eAX)
{
    IEMOP_MNEMONIC("xchg rDI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDI);
}
10671
10672
/**
 * Opcode 0x98 - CBW / CWDE / CDQE.
 *
 * Sign-extends AL into AX, AX into EAX, or EAX into RAX depending on the
 * effective operand size.  Implemented by testing the source sign bit and
 * OR-ing in / AND-ing off the upper half accordingly.
 */
FNIEMOP_DEF(iemOp_cbw)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEMOP_MNEMONIC("cbw");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 7) {
                IEM_MC_OR_GREG_U16(X86_GREG_xAX, UINT16_C(0xff00));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U16(X86_GREG_xAX, UINT16_C(0x00ff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEMOP_MNEMONIC("cwde");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
                IEM_MC_OR_GREG_U32(X86_GREG_xAX, UINT32_C(0xffff0000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U32(X86_GREG_xAX, UINT32_C(0x0000ffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEMOP_MNEMONIC("cdqe");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
                IEM_MC_OR_GREG_U64(X86_GREG_xAX, UINT64_C(0xffffffff00000000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U64(X86_GREG_xAX, UINT64_C(0x00000000ffffffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10718
10719
/**
 * Opcode 0x99 - CWD / CDQ / CQO.
 *
 * Sign-extends rAX into rDX:rAX, i.e. fills rDX with all ones or all zeros
 * depending on the sign bit of the source operand.
 */
FNIEMOP_DEF(iemOp_cwd)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEMOP_MNEMONIC("cwd");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, UINT16_C(0xffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEMOP_MNEMONIC("cdq");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, UINT32_C(0xffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEMOP_MNEMONIC("cqo");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 63) {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, UINT64_C(0xffffffffffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10765
10766
/**
 * Opcode 0x9a - CALL Ap (far call with immediate seg:offset).
 *
 * Invalid in 64-bit mode.  Decodes the 16- or 32-bit offset plus 16-bit
 * selector and defers the actual far call to iemCImpl_callf.
 */
FNIEMOP_DEF(iemOp_call_Ap)
{
    IEMOP_MNEMONIC("call Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    uint32_t offSeg;
    if (pIemCpu->enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&offSeg);
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
    uint16_t uSel;  IEM_OPCODE_GET_NEXT_U16(&uSel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_callf, uSel, offSeg, pIemCpu->enmEffOpSize);
}
10783
10784
/**
 * Opcode 0x9b - WAIT (aka FWAIT).
 *
 * No-op apart from checking for pending FPU exceptions / device-not-available
 * conditions via the MAYBE_RAISE macros.
 */
FNIEMOP_DEF(iemOp_wait)
{
    IEMOP_MNEMONIC("wait");
    IEMOP_HLP_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10798
10799
/** Opcode 0x9c - PUSHF/PUSHFD/PUSHFQ; deferred to iemCImpl_pushf
 *  (64-bit default operand size applies). */
FNIEMOP_DEF(iemOp_pushf_Fv)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_pushf, pIemCpu->enmEffOpSize);
}
10807
10808
/** Opcode 0x9d - POPF/POPFD/POPFQ; deferred to iemCImpl_popf
 *  (64-bit default operand size applies). */
FNIEMOP_DEF(iemOp_popf_Fv)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_popf, pIemCpu->enmEffOpSize);
}
10816
10817
/**
 * Opcode 0x9e - SAHF.
 *
 * Loads SF/ZF/AF/PF/CF from AH into EFLAGS; in 64-bit mode only when the
 * CPUID LAHF/SAHF feature bit is set, otherwise \#UD.  Bit 1 (X86_EFL_1)
 * is forced set as it is always 1 in EFLAGS.
 */
FNIEMOP_DEF(iemOp_sahf)
{
    IEMOP_MNEMONIC("sahf");
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (   pIemCpu->enmCpuMode == IEMMODE_64BIT
        && !IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fLahfSahf)
        return IEMOP_RAISE_INVALID_OPCODE();
    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint32_t, u32Flags);
    IEM_MC_LOCAL(uint32_t, EFlags);
    IEM_MC_FETCH_EFLAGS(EFlags);
    IEM_MC_FETCH_GREG_U8_ZX_U32(u32Flags, X86_GREG_xSP/*=AH*/);
    IEM_MC_AND_LOCAL_U32(u32Flags, X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    IEM_MC_AND_LOCAL_U32(EFlags, UINT32_C(0xffffff00));
    IEM_MC_OR_LOCAL_U32(u32Flags, X86_EFL_1);
    IEM_MC_OR_2LOCS_U32(EFlags, u32Flags);
    IEM_MC_COMMIT_EFLAGS(EFlags);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10840
10841
/**
 * Opcode 0x9f - LAHF.
 *
 * Stores the low byte of EFLAGS into AH; in 64-bit mode only when the CPUID
 * LAHF/SAHF feature bit is set, otherwise \#UD.
 */
FNIEMOP_DEF(iemOp_lahf)
{
    IEMOP_MNEMONIC("lahf");
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (   pIemCpu->enmCpuMode == IEMMODE_64BIT
        && !IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fLahfSahf)
        return IEMOP_RAISE_INVALID_OPCODE();
    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(uint8_t, u8Flags);
    IEM_MC_FETCH_EFLAGS_U8(u8Flags);
    IEM_MC_STORE_GREG_U8(X86_GREG_xSP/*=AH*/, u8Flags);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10858
10859
/**
 * Macro used by iemOp_mov_Al_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL and
 * iemOp_mov_Ov_rAX to fetch the moffsXX bit of the opcode and fend of lock
 * prefixes.  Will return on failures.
 *
 * The moffs immediate is 2/4/8 bytes wide depending on the effective
 * address mode and is zero-extended to 64 bits.
 *
 * @param a_GCPtrMemOff     The variable to store the offset in.
 */
#define IEMOP_FETCH_MOFFS_XX(a_GCPtrMemOff) \
    do \
    { \
        switch (pIemCpu->enmEffAddrMode) \
        { \
            case IEMMODE_16BIT: \
                IEM_OPCODE_GET_NEXT_U16_ZX_U64(&(a_GCPtrMemOff)); \
                break; \
            case IEMMODE_32BIT: \
                IEM_OPCODE_GET_NEXT_U32_ZX_U64(&(a_GCPtrMemOff)); \
                break; \
            case IEMMODE_64BIT: \
                IEM_OPCODE_GET_NEXT_U64(&(a_GCPtrMemOff)); \
                break; \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
        IEMOP_HLP_NO_LOCK_PREFIX(); \
    } while (0)
10884
/** Opcode 0xa0 - MOV AL,Ob: load AL from a moffs byte. */
FNIEMOP_DEF(iemOp_mov_Al_Ob)
{
    /*
     * Get the offset and fend of lock prefixes.
     */
    RTGCPTR GCPtrMemOff;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);

    /*
     * Fetch AL.
     */
    IEM_MC_BEGIN(0,1);
    IEM_MC_LOCAL(uint8_t, u8Tmp);
    IEM_MC_FETCH_MEM_U8(u8Tmp, pIemCpu->iEffSeg, GCPtrMemOff);
    IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10905
10906
/** Opcode 0xa1 - MOV rAX,Ov: load rAX from a moffs location at the
 *  effective operand size. */
FNIEMOP_DEF(iemOp_mov_rAX_Ov)
{
    /*
     * Get the offset and fend of lock prefixes.
     */
    IEMOP_MNEMONIC("mov rAX,Ov");
    RTGCPTR GCPtrMemOff;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);

    /*
     * Fetch rAX.
     */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint16_t, u16Tmp);
            IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10952
10953
/** Opcode 0xa2 - MOV Ob,AL: store AL to a moffs byte. */
FNIEMOP_DEF(iemOp_mov_Ob_AL)
{
    /*
     * Get the offset and fend of lock prefixes.
     */
    RTGCPTR GCPtrMemOff;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);

    /*
     * Store AL.
     */
    IEM_MC_BEGIN(0,1);
    IEM_MC_LOCAL(uint8_t, u8Tmp);
    IEM_MC_FETCH_GREG_U8(u8Tmp, X86_GREG_xAX);
    IEM_MC_STORE_MEM_U8(pIemCpu->iEffSeg, GCPtrMemOff, u8Tmp);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10974
10975
/** Opcode 0xa3 - MOV Ov,rAX: store rAX to a moffs location at the
 *  effective operand size. */
FNIEMOP_DEF(iemOp_mov_Ov_rAX)
{
    /*
     * Get the offset and fend of lock prefixes.
     */
    RTGCPTR GCPtrMemOff;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);

    /*
     * Store rAX.
     */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint16_t, u16Tmp);
            IEM_MC_FETCH_GREG_U16(u16Tmp, X86_GREG_xAX);
            IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrMemOff, u16Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_FETCH_GREG_U32(u32Tmp, X86_GREG_xAX);
            IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrMemOff, u32Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_FETCH_GREG_U64(u64Tmp, X86_GREG_xAX);
            IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrMemOff, u64Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11020
/**
 * Macro used by iemOp_movsb_Xb_Yb and iemOp_movswd_Xv_Yv.
 *
 * Emits one MOVS step: load ValBits bits from DS(iEffSeg):rSI, store to
 * ES:rDI, then advance or retreat both index registers by ValBits/8 bytes
 * according to EFLAGS.DF.  AddrBits selects the index register width.
 */
#define IEM_MOVS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, pIemCpu->iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END();
11039
/**
 * Opcode 0xa4 - MOVSB.
 *
 * A REP/REPNE prefix routes to the C implementation (one variant per
 * address size); otherwise a single byte is moved via IEM_MOVS_CASE.
 */
FNIEMOP_DEF(iemOp_movsb_Xb_Yb)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep movsb Xb,Yb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr16, pIemCpu->iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr32, pIemCpu->iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr64, pIemCpu->iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("movsb Xb,Yb");

    /*
     * Sharing case implementation with movs[wdq] below.
     */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_MOVS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_MOVS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_MOVS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
11073
11074
/**
 * Opcode 0xa5 - MOVSW/MOVSD/MOVSQ (movs Xv,Yv).
 *
 * Moves a word/dword/qword from [iEffSeg:xSI] to [ES:xDI], then advances or
 * retreats both index registers by the operand size depending on EFLAGS.DF
 * (see IEM_MOVS_CASE).  With a REP/REPNZ prefix the repeated operation is
 * deferred to the appropriate opsize x addrmode C implementation.
 */
FNIEMOP_DEF(iemOp_movswd_Xv_Yv)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep movs Xv,Yv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no 'break' here; unreachable since every case above returns */
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /* 16-bit addressing cannot be encoded in 64-bit mode */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("movs Xv,Yv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with movsb.
     */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_MOVS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_MOVS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
11157
11158#undef IEM_MOVS_CASE
11159
/**
 * Macro used by iemOp_cmpsb_Xb_Yb and iemOp_cmpswd_Xv_Yv.
 *
 * Loads one element from [iEffSeg:xSI] and one from [ES:xDI], runs the CMP
 * arithmetic helper on them (updating EFLAGS only; the first operand is a
 * local, so nothing is written back), and then steps xSI/xDI forward or
 * backward by the element size according to EFLAGS.DF.
 */
#define IEM_CMPS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(3, 3); \
        IEM_MC_ARG(uint##ValBits##_t *, puValue1, 0); \
        IEM_MC_ARG(uint##ValBits##_t, uValue2, 1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue1); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue1, pIemCpu->iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue2, X86_SREG_ES, uAddr); \
        IEM_MC_REF_LOCAL(puValue1, uValue1); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puValue1, uValue2, pEFlags); \
        \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { /* DF=1: walk downwards */ \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { /* DF=0: walk upwards */ \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END(); \

11187/** Opcode 0xa6. */
11188FNIEMOP_DEF(iemOp_cmpsb_Xb_Yb)
11189{
11190 IEMOP_HLP_NO_LOCK_PREFIX();
11191
11192 /*
11193 * Use the C implementation if a repeat prefix is encountered.
11194 */
11195 if (pIemCpu->fPrefixes & IEM_OP_PRF_REPZ)
11196 {
11197 IEMOP_MNEMONIC("repe cmps Xb,Yb");
11198 switch (pIemCpu->enmEffAddrMode)
11199 {
11200 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr16, pIemCpu->iEffSeg);
11201 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr32, pIemCpu->iEffSeg);
11202 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr64, pIemCpu->iEffSeg);
11203 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11204 }
11205 }
11206 if (pIemCpu->fPrefixes & IEM_OP_PRF_REPNZ)
11207 {
11208 IEMOP_MNEMONIC("repe cmps Xb,Yb");
11209 switch (pIemCpu->enmEffAddrMode)
11210 {
11211 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr16, pIemCpu->iEffSeg);
11212 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr32, pIemCpu->iEffSeg);
11213 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr64, pIemCpu->iEffSeg);
11214 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11215 }
11216 }
11217 IEMOP_MNEMONIC("cmps Xb,Yb");
11218
11219 /*
11220 * Sharing case implementation with cmps[wdq] below.
11221 */
11222 switch (pIemCpu->enmEffAddrMode)
11223 {
11224 case IEMMODE_16BIT: IEM_CMPS_CASE(8, 16); break;
11225 case IEMMODE_32BIT: IEM_CMPS_CASE(8, 32); break;
11226 case IEMMODE_64BIT: IEM_CMPS_CASE(8, 64); break;
11227 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11228 }
11229 return VINF_SUCCESS;
11230
11231}
11232
11233
/**
 * Opcode 0xa7 - CMPSW/CMPSD/CMPSQ (cmps Xv,Yv).
 *
 * Compares the element at [iEffSeg:xSI] with the one at [ES:xDI], updates
 * EFLAGS, and steps both index registers by the operand size according to
 * EFLAGS.DF (see IEM_CMPS_CASE).  REPE and REPNE forms are dispatched to
 * the opsize x addrmode specific C implementations.
 */
FNIEMOP_DEF(iemOp_cmpswd_Xv_Yv)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC("repe cmps Xv,Yv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no 'break' here; unreachable since every case above returns */
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_4); /* 16-bit addressing cannot be encoded in 64-bit mode */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    if (pIemCpu->fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC("repne cmps Xv,Yv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no 'break' here; unreachable since every case above returns */
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_2); /* 16-bit addressing cannot be encoded in 64-bit mode */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    IEMOP_MNEMONIC("cmps Xv,Yv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with cmpsb.
     */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_CMPS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_CMPS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;

}
11352
11353#undef IEM_CMPS_CASE
11354
/**
 * Opcode 0xa8 - TEST AL,imm8.
 *
 * Dispatches to the shared AL,Ib binary-operator helper with the TEST
 * implementation table.  AF is declared undefined for verification mode.
 */
FNIEMOP_DEF(iemOp_test_AL_Ib)
{
    IEMOP_MNEMONIC("test al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_test);
}
11362
11363
/**
 * Opcode 0xa9 - TEST rAX,immz (AX/EAX/RAX with the operand-size immediate).
 *
 * Dispatches to the shared rAX,Iz binary-operator helper with the TEST
 * implementation table.  AF is declared undefined for verification mode.
 */
FNIEMOP_DEF(iemOp_test_eAX_Iz)
{
    IEMOP_MNEMONIC("test rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_test);
}
11371
11372
/**
 * Macro used by iemOp_stosb_Yb_AL and iemOp_stoswd_Yv_eAX.
 *
 * Stores the low ValBits of xAX at [ES:xDI] and then steps xDI by the
 * element size, down when EFLAGS.DF is set, up otherwise.
 */
#define IEM_STOS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        IEM_MC_FETCH_GREG_U##ValBits(uValue, X86_GREG_xAX); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { /* DF=1: walk downwards */ \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ELSE() { /* DF=0: walk upwards */ \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END(); \

11389/** Opcode 0xaa. */
11390FNIEMOP_DEF(iemOp_stosb_Yb_AL)
11391{
11392 IEMOP_HLP_NO_LOCK_PREFIX();
11393
11394 /*
11395 * Use the C implementation if a repeat prefix is encountered.
11396 */
11397 if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
11398 {
11399 IEMOP_MNEMONIC("rep stos Yb,al");
11400 switch (pIemCpu->enmEffAddrMode)
11401 {
11402 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m16);
11403 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m32);
11404 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m64);
11405 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11406 }
11407 }
11408 IEMOP_MNEMONIC("stos Yb,al");
11409
11410 /*
11411 * Sharing case implementation with stos[wdq] below.
11412 */
11413 switch (pIemCpu->enmEffAddrMode)
11414 {
11415 case IEMMODE_16BIT: IEM_STOS_CASE(8, 16); break;
11416 case IEMMODE_32BIT: IEM_STOS_CASE(8, 32); break;
11417 case IEMMODE_64BIT: IEM_STOS_CASE(8, 64); break;
11418 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11419 }
11420 return VINF_SUCCESS;
11421}
11422
11423
11424/** Opcode 0xab. */
11425FNIEMOP_DEF(iemOp_stoswd_Yv_eAX)
11426{
11427 IEMOP_HLP_NO_LOCK_PREFIX();
11428
11429 /*
11430 * Use the C implementation if a repeat prefix is encountered.
11431 */
11432 if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
11433 {
11434 IEMOP_MNEMONIC("rep stos Yv,rAX");
11435 switch (pIemCpu->enmEffOpSize)
11436 {
11437 case IEMMODE_16BIT:
11438 switch (pIemCpu->enmEffAddrMode)
11439 {
11440 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m16);
11441 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m32);
11442 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m64);
11443 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11444 }
11445 break;
11446 case IEMMODE_32BIT:
11447 switch (pIemCpu->enmEffAddrMode)
11448 {
11449 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m16);
11450 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m32);
11451 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m64);
11452 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11453 }
11454 case IEMMODE_64BIT:
11455 switch (pIemCpu->enmEffAddrMode)
11456 {
11457 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_9);
11458 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m32);
11459 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m64);
11460 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11461 }
11462 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11463 }
11464 }
11465 IEMOP_MNEMONIC("stos Yv,rAX");
11466
11467 /*
11468 * Annoying double switch here.
11469 * Using ugly macro for implementing the cases, sharing it with stosb.
11470 */
11471 switch (pIemCpu->enmEffOpSize)
11472 {
11473 case IEMMODE_16BIT:
11474 switch (pIemCpu->enmEffAddrMode)
11475 {
11476 case IEMMODE_16BIT: IEM_STOS_CASE(16, 16); break;
11477 case IEMMODE_32BIT: IEM_STOS_CASE(16, 32); break;
11478 case IEMMODE_64BIT: IEM_STOS_CASE(16, 64); break;
11479 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11480 }
11481 break;
11482
11483 case IEMMODE_32BIT:
11484 switch (pIemCpu->enmEffAddrMode)
11485 {
11486 case IEMMODE_16BIT: IEM_STOS_CASE(32, 16); break;
11487 case IEMMODE_32BIT: IEM_STOS_CASE(32, 32); break;
11488 case IEMMODE_64BIT: IEM_STOS_CASE(32, 64); break;
11489 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11490 }
11491 break;
11492
11493 case IEMMODE_64BIT:
11494 switch (pIemCpu->enmEffAddrMode)
11495 {
11496 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
11497 case IEMMODE_32BIT: IEM_STOS_CASE(64, 32); break;
11498 case IEMMODE_64BIT: IEM_STOS_CASE(64, 64); break;
11499 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11500 }
11501 break;
11502 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11503 }
11504 return VINF_SUCCESS;
11505}
11506
11507#undef IEM_STOS_CASE
11508
/**
 * Macro used by iemOp_lodsb_AL_Xb and iemOp_lodswd_eAX_Xv.
 *
 * Loads an element from [iEffSeg:xSI] into the low ValBits of xAX and then
 * steps xSI by the element size, down when EFLAGS.DF is set, up otherwise.
 */
#define IEM_LODS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, pIemCpu->iEffSeg, uAddr); \
        IEM_MC_STORE_GREG_U##ValBits(X86_GREG_xAX, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { /* DF=1: walk downwards */ \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { /* DF=0: walk upwards */ \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END();
11524
11525/** Opcode 0xac. */
11526FNIEMOP_DEF(iemOp_lodsb_AL_Xb)
11527{
11528 IEMOP_HLP_NO_LOCK_PREFIX();
11529
11530 /*
11531 * Use the C implementation if a repeat prefix is encountered.
11532 */
11533 if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
11534 {
11535 IEMOP_MNEMONIC("rep lodsb al,Xb");
11536 switch (pIemCpu->enmEffAddrMode)
11537 {
11538 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m16, pIemCpu->iEffSeg);
11539 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m32, pIemCpu->iEffSeg);
11540 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m64, pIemCpu->iEffSeg);
11541 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11542 }
11543 }
11544 IEMOP_MNEMONIC("lodsb al,Xb");
11545
11546 /*
11547 * Sharing case implementation with stos[wdq] below.
11548 */
11549 switch (pIemCpu->enmEffAddrMode)
11550 {
11551 case IEMMODE_16BIT: IEM_LODS_CASE(8, 16); break;
11552 case IEMMODE_32BIT: IEM_LODS_CASE(8, 32); break;
11553 case IEMMODE_64BIT: IEM_LODS_CASE(8, 64); break;
11554 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11555 }
11556 return VINF_SUCCESS;
11557}
11558
11559
11560/** Opcode 0xad. */
11561FNIEMOP_DEF(iemOp_lodswd_eAX_Xv)
11562{
11563 IEMOP_HLP_NO_LOCK_PREFIX();
11564
11565 /*
11566 * Use the C implementation if a repeat prefix is encountered.
11567 */
11568 if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
11569 {
11570 IEMOP_MNEMONIC("rep lods rAX,Xv");
11571 switch (pIemCpu->enmEffOpSize)
11572 {
11573 case IEMMODE_16BIT:
11574 switch (pIemCpu->enmEffAddrMode)
11575 {
11576 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m16, pIemCpu->iEffSeg);
11577 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m32, pIemCpu->iEffSeg);
11578 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m64, pIemCpu->iEffSeg);
11579 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11580 }
11581 break;
11582 case IEMMODE_32BIT:
11583 switch (pIemCpu->enmEffAddrMode)
11584 {
11585 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m16, pIemCpu->iEffSeg);
11586 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m32, pIemCpu->iEffSeg);
11587 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m64, pIemCpu->iEffSeg);
11588 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11589 }
11590 case IEMMODE_64BIT:
11591 switch (pIemCpu->enmEffAddrMode)
11592 {
11593 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_7);
11594 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m32, pIemCpu->iEffSeg);
11595 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m64, pIemCpu->iEffSeg);
11596 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11597 }
11598 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11599 }
11600 }
11601 IEMOP_MNEMONIC("lods rAX,Xv");
11602
11603 /*
11604 * Annoying double switch here.
11605 * Using ugly macro for implementing the cases, sharing it with lodsb.
11606 */
11607 switch (pIemCpu->enmEffOpSize)
11608 {
11609 case IEMMODE_16BIT:
11610 switch (pIemCpu->enmEffAddrMode)
11611 {
11612 case IEMMODE_16BIT: IEM_LODS_CASE(16, 16); break;
11613 case IEMMODE_32BIT: IEM_LODS_CASE(16, 32); break;
11614 case IEMMODE_64BIT: IEM_LODS_CASE(16, 64); break;
11615 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11616 }
11617 break;
11618
11619 case IEMMODE_32BIT:
11620 switch (pIemCpu->enmEffAddrMode)
11621 {
11622 case IEMMODE_16BIT: IEM_LODS_CASE(32, 16); break;
11623 case IEMMODE_32BIT: IEM_LODS_CASE(32, 32); break;
11624 case IEMMODE_64BIT: IEM_LODS_CASE(32, 64); break;
11625 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11626 }
11627 break;
11628
11629 case IEMMODE_64BIT:
11630 switch (pIemCpu->enmEffAddrMode)
11631 {
11632 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
11633 case IEMMODE_32BIT: IEM_LODS_CASE(64, 32); break;
11634 case IEMMODE_64BIT: IEM_LODS_CASE(64, 64); break;
11635 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11636 }
11637 break;
11638 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11639 }
11640 return VINF_SUCCESS;
11641}
11642
11643#undef IEM_LODS_CASE
11644
/**
 * Macro used by iemOp_scasb_AL_Xb and iemOp_scaswd_eAX_Xv.
 *
 * Compares the low ValBits of xAX against the element at [ES:xDI] using
 * the CMP arithmetic helper (only EFLAGS are updated; the xAX reference is
 * not written), then steps xDI by the element size per EFLAGS.DF.
 */
#define IEM_SCAS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(3, 2); \
        IEM_MC_ARG(uint##ValBits##_t *, puRax, 0); \
        IEM_MC_ARG(uint##ValBits##_t, uValue, 1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, X86_SREG_ES, uAddr); \
        IEM_MC_REF_GREG_U##ValBits(puRax, X86_GREG_xAX); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puRax, uValue, pEFlags); \
        \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { /* DF=1: walk downwards */ \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ELSE() { /* DF=0: walk upwards */ \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END();
11666
11667/** Opcode 0xae. */
11668FNIEMOP_DEF(iemOp_scasb_AL_Xb)
11669{
11670 IEMOP_HLP_NO_LOCK_PREFIX();
11671
11672 /*
11673 * Use the C implementation if a repeat prefix is encountered.
11674 */
11675 if (pIemCpu->fPrefixes & IEM_OP_PRF_REPZ)
11676 {
11677 IEMOP_MNEMONIC("repe scasb al,Xb");
11678 switch (pIemCpu->enmEffAddrMode)
11679 {
11680 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m16);
11681 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m32);
11682 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m64);
11683 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11684 }
11685 }
11686 if (pIemCpu->fPrefixes & IEM_OP_PRF_REPNZ)
11687 {
11688 IEMOP_MNEMONIC("repne scasb al,Xb");
11689 switch (pIemCpu->enmEffAddrMode)
11690 {
11691 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m16);
11692 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m32);
11693 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m64);
11694 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11695 }
11696 }
11697 IEMOP_MNEMONIC("scasb al,Xb");
11698
11699 /*
11700 * Sharing case implementation with stos[wdq] below.
11701 */
11702 switch (pIemCpu->enmEffAddrMode)
11703 {
11704 case IEMMODE_16BIT: IEM_SCAS_CASE(8, 16); break;
11705 case IEMMODE_32BIT: IEM_SCAS_CASE(8, 32); break;
11706 case IEMMODE_64BIT: IEM_SCAS_CASE(8, 64); break;
11707 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11708 }
11709 return VINF_SUCCESS;
11710}
11711
11712
11713/** Opcode 0xaf. */
11714FNIEMOP_DEF(iemOp_scaswd_eAX_Xv)
11715{
11716 IEMOP_HLP_NO_LOCK_PREFIX();
11717
11718 /*
11719 * Use the C implementation if a repeat prefix is encountered.
11720 */
11721 if (pIemCpu->fPrefixes & IEM_OP_PRF_REPZ)
11722 {
11723 IEMOP_MNEMONIC("repe scas rAX,Xv");
11724 switch (pIemCpu->enmEffOpSize)
11725 {
11726 case IEMMODE_16BIT:
11727 switch (pIemCpu->enmEffAddrMode)
11728 {
11729 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m16);
11730 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m32);
11731 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m64);
11732 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11733 }
11734 break;
11735 case IEMMODE_32BIT:
11736 switch (pIemCpu->enmEffAddrMode)
11737 {
11738 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m16);
11739 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m32);
11740 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m64);
11741 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11742 }
11743 case IEMMODE_64BIT:
11744 switch (pIemCpu->enmEffAddrMode)
11745 {
11746 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /** @todo It's this wrong, we can do 16-bit addressing in 64-bit mode, but not 32-bit. right? */
11747 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m32);
11748 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m64);
11749 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11750 }
11751 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11752 }
11753 }
11754 if (pIemCpu->fPrefixes & IEM_OP_PRF_REPNZ)
11755 {
11756 IEMOP_MNEMONIC("repne scas rAX,Xv");
11757 switch (pIemCpu->enmEffOpSize)
11758 {
11759 case IEMMODE_16BIT:
11760 switch (pIemCpu->enmEffAddrMode)
11761 {
11762 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m16);
11763 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m32);
11764 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m64);
11765 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11766 }
11767 break;
11768 case IEMMODE_32BIT:
11769 switch (pIemCpu->enmEffAddrMode)
11770 {
11771 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m16);
11772 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m32);
11773 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m64);
11774 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11775 }
11776 case IEMMODE_64BIT:
11777 switch (pIemCpu->enmEffAddrMode)
11778 {
11779 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_5);
11780 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m32);
11781 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m64);
11782 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11783 }
11784 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11785 }
11786 }
11787 IEMOP_MNEMONIC("scas rAX,Xv");
11788
11789 /*
11790 * Annoying double switch here.
11791 * Using ugly macro for implementing the cases, sharing it with scasb.
11792 */
11793 switch (pIemCpu->enmEffOpSize)
11794 {
11795 case IEMMODE_16BIT:
11796 switch (pIemCpu->enmEffAddrMode)
11797 {
11798 case IEMMODE_16BIT: IEM_SCAS_CASE(16, 16); break;
11799 case IEMMODE_32BIT: IEM_SCAS_CASE(16, 32); break;
11800 case IEMMODE_64BIT: IEM_SCAS_CASE(16, 64); break;
11801 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11802 }
11803 break;
11804
11805 case IEMMODE_32BIT:
11806 switch (pIemCpu->enmEffAddrMode)
11807 {
11808 case IEMMODE_16BIT: IEM_SCAS_CASE(32, 16); break;
11809 case IEMMODE_32BIT: IEM_SCAS_CASE(32, 32); break;
11810 case IEMMODE_64BIT: IEM_SCAS_CASE(32, 64); break;
11811 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11812 }
11813 break;
11814
11815 case IEMMODE_64BIT:
11816 switch (pIemCpu->enmEffAddrMode)
11817 {
11818 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
11819 case IEMMODE_32BIT: IEM_SCAS_CASE(64, 32); break;
11820 case IEMMODE_64BIT: IEM_SCAS_CASE(64, 64); break;
11821 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11822 }
11823 break;
11824 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11825 }
11826 return VINF_SUCCESS;
11827}
11828
11829#undef IEM_SCAS_CASE
11830
11831/**
11832 * Common 'mov r8, imm8' helper.
11833 */
11834FNIEMOP_DEF_1(iemOpCommonMov_r8_Ib, uint8_t, iReg)
11835{
11836 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
11837 IEMOP_HLP_NO_LOCK_PREFIX();
11838
11839 IEM_MC_BEGIN(0, 1);
11840 IEM_MC_LOCAL_CONST(uint8_t, u8Value,/*=*/ u8Imm);
11841 IEM_MC_STORE_GREG_U8(iReg, u8Value);
11842 IEM_MC_ADVANCE_RIP();
11843 IEM_MC_END();
11844
11845 return VINF_SUCCESS;
11846}
11847
11848
/** Opcode 0xb0 - mov AL,imm8 (AL, or R8B with REX.B). */
FNIEMOP_DEF(iemOp_mov_AL_Ib)
{
    IEMOP_MNEMONIC("mov AL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xAX | pIemCpu->uRexB);
}
11855
11856
/** Opcode 0xb1 - mov CL,imm8 (CL, or R9B with REX.B).
 * NOTE(review): name lacks the 'mov_' prefix used by iemOp_mov_AL_Ib;
 * kept as-is since the opcode table references it. */
FNIEMOP_DEF(iemOp_CL_Ib)
{
    IEMOP_MNEMONIC("mov CL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xCX | pIemCpu->uRexB);
}
11863
11864
/** Opcode 0xb2 - mov DL,imm8 (DL, or R10B with REX.B). */
FNIEMOP_DEF(iemOp_DL_Ib)
{
    IEMOP_MNEMONIC("mov DL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDX | pIemCpu->uRexB);
}
11871
11872
/** Opcode 0xb3 - mov BL,imm8 (BL, or R11B with REX.B). */
FNIEMOP_DEF(iemOp_BL_Ib)
{
    IEMOP_MNEMONIC("mov BL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBX | pIemCpu->uRexB);
}
11879
11880
/** Opcode 0xb4 - mov AH,imm8.
 * NOTE(review): register index 4 (X86_GREG_xSP) selects AH without a REX
 * prefix and SPL with one; presumably the U8 GREG accessors resolve this -
 * verify against the register access helpers. */
FNIEMOP_DEF(iemOp_mov_AH_Ib)
{
    IEMOP_MNEMONIC("mov AH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP | pIemCpu->uRexB);
}
11887
11888
/** Opcode 0xb5 - mov CH,imm8 (index 5 is CH without REX, BPL with REX). */
FNIEMOP_DEF(iemOp_CH_Ib)
{
    IEMOP_MNEMONIC("mov CH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBP | pIemCpu->uRexB);
}
11895
11896
/** Opcode 0xb6 - mov DH,imm8 (index 6 is DH without REX, SIL with REX). */
FNIEMOP_DEF(iemOp_DH_Ib)
{
    IEMOP_MNEMONIC("mov DH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSI | pIemCpu->uRexB);
}
11903
11904
/** Opcode 0xb7 - mov BH,imm8 (index 7 is BH without REX, DIL with REX). */
FNIEMOP_DEF(iemOp_BH_Ib)
{
    IEMOP_MNEMONIC("mov BH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDI | pIemCpu->uRexB);
}
11911
11912
11913/**
11914 * Common 'mov regX,immX' helper.
11915 */
11916FNIEMOP_DEF_1(iemOpCommonMov_Rv_Iv, uint8_t, iReg)
11917{
11918 switch (pIemCpu->enmEffOpSize)
11919 {
11920 case IEMMODE_16BIT:
11921 {
11922 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
11923 IEMOP_HLP_NO_LOCK_PREFIX();
11924
11925 IEM_MC_BEGIN(0, 1);
11926 IEM_MC_LOCAL_CONST(uint16_t, u16Value,/*=*/ u16Imm);
11927 IEM_MC_STORE_GREG_U16(iReg, u16Value);
11928 IEM_MC_ADVANCE_RIP();
11929 IEM_MC_END();
11930 break;
11931 }
11932
11933 case IEMMODE_32BIT:
11934 {
11935 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
11936 IEMOP_HLP_NO_LOCK_PREFIX();
11937
11938 IEM_MC_BEGIN(0, 1);
11939 IEM_MC_LOCAL_CONST(uint32_t, u32Value,/*=*/ u32Imm);
11940 IEM_MC_STORE_GREG_U32(iReg, u32Value);
11941 IEM_MC_ADVANCE_RIP();
11942 IEM_MC_END();
11943 break;
11944 }
11945 case IEMMODE_64BIT:
11946 {
11947 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm); /* 64-bit immediate! */
11948 IEMOP_HLP_NO_LOCK_PREFIX();
11949
11950 IEM_MC_BEGIN(0, 1);
11951 IEM_MC_LOCAL_CONST(uint64_t, u64Value,/*=*/ u64Imm);
11952 IEM_MC_STORE_GREG_U64(iReg, u64Value);
11953 IEM_MC_ADVANCE_RIP();
11954 IEM_MC_END();
11955 break;
11956 }
11957 }
11958
11959 return VINF_SUCCESS;
11960}
11961
11962
11963/** Opcode 0xb8. */
11964FNIEMOP_DEF(iemOp_eAX_Iv)
11965{
11966 IEMOP_MNEMONIC("mov rAX,IV");
11967 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xAX | pIemCpu->uRexB);
11968}
11969
11970
11971/** Opcode 0xb9. */
11972FNIEMOP_DEF(iemOp_eCX_Iv)
11973{
11974 IEMOP_MNEMONIC("mov rCX,IV");
11975 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xCX | pIemCpu->uRexB);
11976}
11977
11978
11979/** Opcode 0xba. */
11980FNIEMOP_DEF(iemOp_eDX_Iv)
11981{
11982 IEMOP_MNEMONIC("mov rDX,IV");
11983 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDX | pIemCpu->uRexB);
11984}
11985
11986
11987/** Opcode 0xbb. */
11988FNIEMOP_DEF(iemOp_eBX_Iv)
11989{
11990 IEMOP_MNEMONIC("mov rBX,IV");
11991 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBX | pIemCpu->uRexB);
11992}
11993
11994
11995/** Opcode 0xbc. */
11996FNIEMOP_DEF(iemOp_eSP_Iv)
11997{
11998 IEMOP_MNEMONIC("mov rSP,IV");
11999 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSP | pIemCpu->uRexB);
12000}
12001
12002
12003/** Opcode 0xbd. */
12004FNIEMOP_DEF(iemOp_eBP_Iv)
12005{
12006 IEMOP_MNEMONIC("mov rBP,IV");
12007 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBP | pIemCpu->uRexB);
12008}
12009
12010
12011/** Opcode 0xbe. */
12012FNIEMOP_DEF(iemOp_eSI_Iv)
12013{
12014 IEMOP_MNEMONIC("mov rSI,IV");
12015 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSI | pIemCpu->uRexB);
12016}
12017
12018
12019/** Opcode 0xbf. */
12020FNIEMOP_DEF(iemOp_eDI_Iv)
12021{
12022 IEMOP_MNEMONIC("mov rDI,IV");
12023 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDI | pIemCpu->uRexB);
12024}
12025
12026
/** Opcode 0xc0. Group 2 rotate/shift with byte destination and immediate
 *  shift count (rol/ror/rcl/rcr/shl/shr/sar Eb,Ib).  Introduced with the
 *  80186; the operation is selected by the ModR/M reg field, /6 is
 *  undefined. */
FNIEMOP_DEF(iemOp_Grp2_Eb_Ib)
{
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Eb,Ib"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Eb,Ib"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Eb,Ib"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Eb,Ib"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Eb,Ib"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Eb,Ib"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Eb,Ib"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    /* Tell the verifier that OF and AF may end up in an undefined state. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEMOP_HLP_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, cShiftArg, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        /* cbImm=1: a shift-count immediate still follows the ModR/M bytes. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEM_MC_ASSIGN(cShiftArg, cShift);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
12086
12087
/** Opcode 0xc1. Group 2 rotate/shift with word/dword/qword destination and
 *  immediate shift count (rol/ror/rcl/rcr/shl/shr/sar Ev,Ib).  Introduced
 *  with the 80186; the operation is selected by the ModR/M reg field, /6 is
 *  undefined. */
FNIEMOP_DEF(iemOp_Grp2_Ev_Ib)
{
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Ev,Ib"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Ev,Ib"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Ev,Ib"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Ev,Ib"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Ev,Ib"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Ev,Ib"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Ev,Ib"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    /* Tell the verifier that OF and AF may end up in an undefined state. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEMOP_HLP_NO_LOCK_PREFIX();
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit register writes zero the upper half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* cbImm=1: a shift-count immediate still follows the ModR/M bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12225
12226
/** Opcode 0xc2. retn Iw - near return, popping Iw extra bytes off the
 *  stack.  Defers to the C implementation with the effective operand size. */
FNIEMOP_DEF(iemOp_retn_Iw)
{
    IEMOP_MNEMONIC("retn Iw");
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* Near return defaults to 64-bit operand size in long mode. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pIemCpu->enmEffOpSize, u16Imm);
}
12236
12237
/** Opcode 0xc3. retn - near return, no stack adjustment (zero passed as the
 *  pop count to the common C implementation). */
FNIEMOP_DEF(iemOp_retn)
{
    IEMOP_MNEMONIC("retn");
    /* Near return defaults to 64-bit operand size in long mode. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pIemCpu->enmEffOpSize, 0);
}
12246
12247
/** Opcode 0xc4. les Gv,Mp in legacy/compatibility mode with a memory
 *  operand; otherwise the 2-byte VEX escape (currently rejected as invalid
 *  opcode - see todo). */
FNIEMOP_DEF(iemOp_les_Gv_Mp_vex2)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (   pIemCpu->enmCpuMode == IEMMODE_64BIT
        || (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC("2-byte-vex");
        /* The LES instruction is invalid 64-bit mode. In legacy and
           compatibility mode it is invalid with MOD=3.
           The use as a VEX prefix is made possible by assigning the inverted
           REX.R to the top MOD bit, and the top bit in the inverted register
           specifier to the bottom MOD bit, thereby effectively limiting 32-bit
           to accessing registers 0..7 in this VEX form. */
        /** @todo VEX: Just use new tables for it. */
        return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_MNEMONIC("les Gv,Mp");
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_ES, bRm);
}
12268
12269
/** Opcode 0xc5. lds Gv,Mp in legacy/compatibility mode with a memory
 *  operand; otherwise the 3-byte VEX escape.  The VEX path decodes the two
 *  VEX bytes and the opcode but then rejects the instruction (see todo). */
FNIEMOP_DEF(iemOp_lds_Gv_Mp_vex3)
{
    /* The LDS instruction is invalid 64-bit mode. In legacy and
       compatibility mode it is invalid with MOD=3.
       The use as a VEX prefix is made possible by assigning the inverted
       REX.R and REX.X to the two MOD bits, since the REX bits are ignored
       outside of 64-bit mode. VEX is not available in real or v86 mode. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pIemCpu->enmCpuMode != IEMMODE_64BIT)
    {
        if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        {
            IEMOP_MNEMONIC("lds Gv,Mp");
            return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_DS, bRm);
        }
        IEMOP_HLP_NO_REAL_OR_V86_MODE();
    }

    IEMOP_MNEMONIC("3-byte-vex");
    /** @todo Test when exactly the VEX conformance checks kick in during
     *        instruction decoding and fetching (using \#PF). */
    uint8_t bVex1;   IEM_OPCODE_GET_NEXT_U8(&bVex1);
    uint8_t bVex2;   IEM_OPCODE_GET_NEXT_U8(&bVex2);
    uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
#if 0 /* will make sense of this next week... */
    if (   !(pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REX))
        &&
        )
    {

    }
#endif

    /** @todo VEX: Just use new tables for it. */
    return IEMOP_RAISE_INVALID_OPCODE();
}
12307
12308
/** Opcode 0xc6. Group 11: mov Eb,Ib is the only defined encoding (/0);
 *  other reg-field values raise \#UD. */
FNIEMOP_DEF(iemOp_Grp11_Eb_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
        return IEMOP_RAISE_INVALID_OPCODE();
    IEMOP_MNEMONIC("mov Eb,Ib");

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_BEGIN(0, 0);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u8Imm);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        /* cbImm=1: the immediate byte follows the addressing bytes. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_STORE_MEM_U8(pIemCpu->iEffSeg, GCPtrEffDst, u8Imm);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
12340
12341
/** Opcode 0xc7. Group 11: mov Ev,Iz is the only defined encoding (/0);
 *  other reg-field values raise \#UD.  The 64-bit form uses a sign-extended
 *  32-bit immediate. */
FNIEMOP_DEF(iemOp_Grp11_Ev_Iz)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Ev,Iz in this group. */
        return IEMOP_RAISE_INVALID_OPCODE();
    IEMOP_MNEMONIC("mov Ev,Iz");

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 0);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 0);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 0);
                /* Iz is at most 32 bits; sign-extend to 64. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                /* cbImm matches the trailing immediate size (2/4/4 bytes). */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrEffDst, u32Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffDst, u64Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12422
12423
12424
12425
/** Opcode 0xc8. enter Iw,Ib - create stack frame of Iw bytes with nesting
 *  level Ib (186+).  Defers to the C implementation. */
FNIEMOP_DEF(iemOp_enter_Iw_Ib)
{
    IEMOP_MNEMONIC("enter Iw,Ib");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEMOP_HLP_NO_LOCK_PREFIX();
    uint16_t cbFrame;        IEM_OPCODE_GET_NEXT_U16(&cbFrame);
    uint8_t  u8NestingLevel; IEM_OPCODE_GET_NEXT_U8(&u8NestingLevel);
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_enter, pIemCpu->enmEffOpSize, cbFrame, u8NestingLevel);
}
12437
12438
12439/** Opcode 0xc9. */
12440FNIEMOP_DEF(iemOp_leave)
12441{
12442 IEMOP_MNEMONIC("retn");
12443 IEMOP_HLP_MIN_186();
12444 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
12445 IEMOP_HLP_NO_LOCK_PREFIX();
12446 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_leave, pIemCpu->enmEffOpSize);
12447}
12448
12449
/** Opcode 0xca. retf Iw - far return, popping Iw extra bytes off the
 *  stack.  Defers to the C implementation. */
FNIEMOP_DEF(iemOp_retf_Iw)
{
    IEMOP_MNEMONIC("retf Iw");
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pIemCpu->enmEffOpSize, u16Imm);
}
12459
12460
/** Opcode 0xcb. retf - far return, no stack adjustment (zero pop count). */
FNIEMOP_DEF(iemOp_retf)
{
    IEMOP_MNEMONIC("retf");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pIemCpu->enmEffOpSize, 0);
}
12469
12470
/** Opcode 0xcc. int3 - breakpoint; raises \#BP via the common interrupt
 *  C implementation, flagged as the dedicated breakpoint instruction. */
FNIEMOP_DEF(iemOp_int_3)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_BP, true /*fIsBpInstr*/);
}
12477
12478
/** Opcode 0xcd. int Ib - software interrupt with an explicit vector;
 *  not treated as the breakpoint instruction even for vector 3. */
FNIEMOP_DEF(iemOp_int_Ib)
{
    uint8_t u8Int; IEM_OPCODE_GET_NEXT_U8(&u8Int);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, u8Int, false /*fIsBpInstr*/);
}
12486
12487
/** Opcode 0xce. into - raise \#OF if the overflow flag is set; invalid in
 *  64-bit mode.  Conditional check happens inside iemCImpl_int. */
FNIEMOP_DEF(iemOp_into)
{
    IEMOP_MNEMONIC("into");
    IEMOP_HLP_NO_64BIT();

    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG_CONST(uint8_t,   u8Int,      /*=*/ X86_XCPT_OF, 0);
    IEM_MC_ARG_CONST(bool,      fIsBpInstr, /*=*/ false, 1);
    IEM_MC_CALL_CIMPL_2(iemCImpl_int, u8Int, fIsBpInstr);
    IEM_MC_END();
    return VINF_SUCCESS;
}
12501
12502
/** Opcode 0xcf. iret - interrupt return; defers to the C implementation
 *  with the effective operand size. */
FNIEMOP_DEF(iemOp_iret)
{
    IEMOP_MNEMONIC("iret");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_iret, pIemCpu->enmEffOpSize);
}
12510
12511
/** Opcode 0xd0. Group 2 rotate/shift with byte destination and an implicit
 *  shift count of 1.  The operation is selected by the ModR/M reg field,
 *  /6 is undefined. */
FNIEMOP_DEF(iemOp_Grp2_Eb_1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Eb,1"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Eb,1"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Eb,1"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Eb,1"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Eb,1"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Eb,1"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Eb,1"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
    /* Tell the verifier that OF and AF may end up in an undefined state. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        /* cbImm=0: no immediate follows this encoding. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
12567
12568
12569
/** Opcode 0xd1. Group 2 rotate/shift with word/dword/qword destination and
 *  an implicit shift count of 1.  The operation is selected by the ModR/M
 *  reg field, /6 is undefined. */
FNIEMOP_DEF(iemOp_Grp2_Ev_1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Ev,1"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Ev,1"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Ev,1"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Ev,1"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Ev,1"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Ev,1"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Ev,1"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
    /* Tell the verifier that OF and AF may end up in an undefined state. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_NO_LOCK_PREFIX();
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit register writes zero the upper half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* cbImm=0: no immediate follows this encoding. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12699
12700
/** Opcode 0xd2. Group 2 rotate/shift with byte destination and the shift
 *  count taken from CL.  The operation is selected by the ModR/M reg field,
 *  /6 is undefined. */
FNIEMOP_DEF(iemOp_Grp2_Eb_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Eb,CL"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Eb,CL"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Eb,CL"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Eb,CL"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Eb,CL"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Eb,CL"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Eb,CL"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc, grr. */
    }
    /* Tell the verifier that OF and AF may end up in an undefined state. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, cShiftArg, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        /* Shift count comes from CL (low byte of rCX). */
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, cShiftArg, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
12758
12759
/** Opcode 0xd3. Group 2 rotate/shift with word/dword/qword destination and
 *  the shift count taken from CL.  The operation is selected by the ModR/M
 *  reg field, /6 is undefined. */
FNIEMOP_DEF(iemOp_Grp2_Ev_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Ev,CL"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Ev,CL"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Ev,CL"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Ev,CL"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Ev,CL"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Ev,CL"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Ev,CL"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    /* Tell the verifier that OF and AF may end up in an undefined state. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_NO_LOCK_PREFIX();
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                /* Shift count comes from CL (low byte of rCX). */
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit register writes zero the upper half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12895
/** Opcode 0xd4. aam Ib - ASCII adjust AX after multiply; the immediate is
 *  the divisor (normally 10).  Raises \#DE when the immediate is zero;
 *  invalid in 64-bit mode. */
FNIEMOP_DEF(iemOp_aam_Ib)
{
    IEMOP_MNEMONIC("aam Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    if (!bImm)
        return IEMOP_RAISE_DIVIDE_ERROR(); /* AAM 0 divides by zero. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aam, bImm);
}
12907
12908
/** Opcode 0xd5. */
FNIEMOP_DEF(iemOp_aad_Ib)
{
    IEMOP_MNEMONIC("aad Ib");
    /* The immediate byte is the multiplier; 0x0a is the canonical encoding.
       Unlike AAM, a zero immediate is harmless (no division involved). */
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT(); /* AAD is invalid (#UD) in 64-bit mode. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aad, bImm);
}
12918
12919
12920/** Opcode 0xd6. */
12921FNIEMOP_DEF(iemOp_salc)
12922{
12923 IEMOP_MNEMONIC("salc");
12924 IEMOP_HLP_MIN_286(); /* (undocument at the time) */
12925 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12926 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12927 IEMOP_HLP_NO_64BIT();
12928
12929 IEM_MC_BEGIN(0, 0);
12930 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
12931 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0xff);
12932 } IEM_MC_ELSE() {
12933 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0x00);
12934 } IEM_MC_ENDIF();
12935 IEM_MC_ADVANCE_RIP();
12936 IEM_MC_END();
12937 return VINF_SUCCESS;
12938}
12939
12940
/** Opcode 0xd7.
 * XLAT/XLATB: AL = [DS:(r/e)BX + zero-extended AL], honouring segment
 * override prefixes via pIemCpu->iEffSeg. One case per effective address
 * size since the rBX addition wraps at the address width. */
FNIEMOP_DEF(iemOp_xlat)
{
    IEMOP_MNEMONIC("xlat");
    IEMOP_HLP_NO_LOCK_PREFIX();
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint16_t, u16Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U16(u16Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U16_TO_LOCAL(u16Addr, X86_GREG_xBX);
            /* FETCH_MEM16 applies 16-bit wrap-around semantics. */
            IEM_MC_FETCH_MEM16_U8(u8Tmp, pIemCpu->iEffSeg, u16Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint32_t, u32Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U32(u32Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U32_TO_LOCAL(u32Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM32_U8(u8Tmp, pIemCpu->iEffSeg, u32Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint64_t, u64Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U64(u64Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U64_TO_LOCAL(u64Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM_U8(u8Tmp, pIemCpu->iEffSeg, u64Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
12987
12988
12989/**
12990 * Common worker for FPU instructions working on ST0 and STn, and storing the
12991 * result in ST0.
12992 *
12993 * @param pfnAImpl Pointer to the instruction implementation (assembly).
12994 */
12995FNIEMOP_DEF_2(iemOpHlpFpu_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
12996{
12997 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12998
12999 IEM_MC_BEGIN(3, 1);
13000 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
13001 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
13002 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
13003 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
13004
13005 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13006 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13007 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
13008 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
13009 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
13010 IEM_MC_ELSE()
13011 IEM_MC_FPU_STACK_UNDERFLOW(0);
13012 IEM_MC_ENDIF();
13013 IEM_MC_USED_FPU();
13014 IEM_MC_ADVANCE_RIP();
13015
13016 IEM_MC_END();
13017 return VINF_SUCCESS;
13018}
13019
13020
13021/**
13022 * Common worker for FPU instructions working on ST0 and STn, and only affecting
13023 * flags.
13024 *
13025 * @param pfnAImpl Pointer to the instruction implementation (assembly).
13026 */
13027FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
13028{
13029 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13030
13031 IEM_MC_BEGIN(3, 1);
13032 IEM_MC_LOCAL(uint16_t, u16Fsw);
13033 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
13034 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
13035 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
13036
13037 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13038 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13039 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
13040 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
13041 IEM_MC_UPDATE_FSW(u16Fsw);
13042 IEM_MC_ELSE()
13043 IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
13044 IEM_MC_ENDIF();
13045 IEM_MC_USED_FPU();
13046 IEM_MC_ADVANCE_RIP();
13047
13048 IEM_MC_END();
13049 return VINF_SUCCESS;
13050}
13051
13052
13053/**
13054 * Common worker for FPU instructions working on ST0 and STn, only affecting
13055 * flags, and popping when done.
13056 *
13057 * @param pfnAImpl Pointer to the instruction implementation (assembly).
13058 */
13059FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
13060{
13061 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13062
13063 IEM_MC_BEGIN(3, 1);
13064 IEM_MC_LOCAL(uint16_t, u16Fsw);
13065 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
13066 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
13067 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
13068
13069 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13070 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13071 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
13072 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
13073 IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
13074 IEM_MC_ELSE()
13075 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(UINT8_MAX);
13076 IEM_MC_ENDIF();
13077 IEM_MC_USED_FPU();
13078 IEM_MC_ADVANCE_RIP();
13079
13080 IEM_MC_END();
13081 return VINF_SUCCESS;
13082}
13083
13084
/** Opcode 0xd8 11/0.
 * FADD ST0,STn: ST0 = ST0 + STn. */
FNIEMOP_DEF_1(iemOp_fadd_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fadd st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fadd_r80_by_r80);
}
13091
13092
/** Opcode 0xd8 11/1.
 * FMUL ST0,STn: ST0 = ST0 * STn. */
FNIEMOP_DEF_1(iemOp_fmul_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fmul st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fmul_r80_by_r80);
}
13099
13100
/** Opcode 0xd8 11/2.
 * FCOM ST0,STn: compare ST0 with STn, setting C0/C2/C3 only. */
FNIEMOP_DEF_1(iemOp_fcom_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fcom_r80_by_r80);
}
13107
13108
/** Opcode 0xd8 11/3.
 * FCOMP ST0,STn: same as FCOM but pops the stack afterwards
 * (shares the FCOM assembly implementation). */
FNIEMOP_DEF_1(iemOp_fcomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fcom_r80_by_r80);
}
13115
13116
/** Opcode 0xd8 11/4.
 * FSUB ST0,STn: ST0 = ST0 - STn. */
FNIEMOP_DEF_1(iemOp_fsub_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsub st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsub_r80_by_r80);
}
13123
13124
/** Opcode 0xd8 11/5.
 * FSUBR ST0,STn: ST0 = STn - ST0 (reversed operands). */
FNIEMOP_DEF_1(iemOp_fsubr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsubr_r80_by_r80);
}
13131
13132
/** Opcode 0xd8 11/6.
 * FDIV ST0,STn: ST0 = ST0 / STn. */
FNIEMOP_DEF_1(iemOp_fdiv_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdiv st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdiv_r80_by_r80);
}
13139
13140
/** Opcode 0xd8 11/7.
 * FDIVR ST0,STn: ST0 = STn / ST0 (reversed operands). */
FNIEMOP_DEF_1(iemOp_fdivr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdivr_r80_by_r80);
}
13147
13148
13149/**
13150 * Common worker for FPU instructions working on ST0 and an m32r, and storing
13151 * the result in ST0.
13152 *
13153 * @param pfnAImpl Pointer to the instruction implementation (assembly).
13154 */
13155FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32r, uint8_t, bRm, PFNIEMAIMPLFPUR32, pfnAImpl)
13156{
13157 IEM_MC_BEGIN(3, 3);
13158 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13159 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
13160 IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
13161 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
13162 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
13163 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);
13164
13165 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13166 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13167
13168 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13169 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13170 IEM_MC_FETCH_MEM_R32(r32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);
13171
13172 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
13173 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr32Val2);
13174 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
13175 IEM_MC_ELSE()
13176 IEM_MC_FPU_STACK_UNDERFLOW(0);
13177 IEM_MC_ENDIF();
13178 IEM_MC_USED_FPU();
13179 IEM_MC_ADVANCE_RIP();
13180
13181 IEM_MC_END();
13182 return VINF_SUCCESS;
13183}
13184
13185
/** Opcode 0xd8 !11/0.
 * FADD ST0,m32real: ST0 = ST0 + [mem32]. */
FNIEMOP_DEF_1(iemOp_fadd_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fadd st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fadd_r80_by_r32);
}
13192
13193
/** Opcode 0xd8 !11/1.
 * FMUL ST0,m32real: ST0 = ST0 * [mem32]. */
FNIEMOP_DEF_1(iemOp_fmul_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fmul st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fmul_r80_by_r32);
}
13200
13201
/** Opcode 0xd8 !11/2.
 * FCOM ST0,m32real: compare ST0 with a 32-bit real from memory; only the
 * FSW condition codes are updated, no register is written. */
FNIEMOP_DEF_1(iemOp_fcom_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcom st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U,            r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U,  pr32Val2,   r32Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Memory is read before checking ST0, so #PF etc. win over underflow. */
    IEM_MC_FETCH_MEM_R32(r32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        /* The _WITH_MEM_OP variant also records FPUDP/FPUDS. */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13234
13235
/** Opcode 0xd8 !11/3.
 * FCOMP ST0,m32real: like FCOM m32real but pops ST0 afterwards
 * (shares the FCOM assembly implementation). */
FNIEMOP_DEF_1(iemOp_fcomp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcomp st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U,            r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U,  pr32Val2,   r32Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13268
13269
/** Opcode 0xd8 !11/4.
 * FSUB ST0,m32real: ST0 = ST0 - [mem32]. */
FNIEMOP_DEF_1(iemOp_fsub_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsub st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsub_r80_by_r32);
}
13276
13277
/** Opcode 0xd8 !11/5.
 * FSUBR ST0,m32real: ST0 = [mem32] - ST0 (reversed operands). */
FNIEMOP_DEF_1(iemOp_fsubr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsubr_r80_by_r32);
}
13284
13285
/** Opcode 0xd8 !11/6.
 * FDIV ST0,m32real: ST0 = ST0 / [mem32]. */
FNIEMOP_DEF_1(iemOp_fdiv_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdiv st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdiv_r80_by_r32);
}
13292
13293
/** Opcode 0xd8 !11/7.
 * FDIVR ST0,m32real: ST0 = [mem32] / ST0 (reversed operands). */
FNIEMOP_DEF_1(iemOp_fdivr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdivr_r80_by_r32);
}
13300
13301
/** Opcode 0xd8.
 * First FPU escape byte: dispatches on ModRM.  Register forms (mod == 3)
 * operate on ST0,STn; memory forms use a 32-bit real source operand.
 * The /reg field selects the operation in both cases. */
FNIEMOP_DEF(iemOp_EscF0)
{
    /* Remember the FPU opcode offset (last fetched byte) for FOP updates. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_stN,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_stN,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_stN, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m32r,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m32r,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m32r,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m32r,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m32r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m32r,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m32r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
13339
13340
/** Opcode 0xd9 /0 mem32real
 * FLD m32real: convert a 32-bit real from memory to 80-bit and push it.
 * The push requires ST7 (the register that will become ST0) to be empty,
 * otherwise the push-overflow handler runs.
 * @sa iemOp_fld_m64r */
FNIEMOP_DEF_1(iemOp_fld_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fld m32r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U,            r32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U,  pr32Val,    r32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r32_to_r80, pFpuRes, pr32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13373
13374
/** Opcode 0xd9 !11/2 mem32real
 * FST m32real: round ST0 to 32-bit real and store to memory, ST0 stays.
 * On empty ST0 with IM masked, a negative QNaN is written instead. */
FNIEMOP_DEF_1(iemOp_fst_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fst m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U,             pr32Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination up front so memory faults precede FPU work. */
    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        /* Commit is conditional on the FSW (unmasked exceptions suppress it). */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13409
13410
/** Opcode 0xd9 !11/3
 * FSTP m32real: like FST m32real but pops ST0 after the store. */
FNIEMOP_DEF_1(iemOp_fstp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fstp m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U,             pr32Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST0: write a negative QNaN if IM is masked, then report
           underflow (which also handles the pop). */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13445
13446
/** Opcode 0xd9 !11/4
 * FLDENV m14/28byte: load the FPU environment (control/status/tag words,
 * instruction and data pointers) from memory; layout depends on the
 * effective operand size, hence it is passed to the C-impl. */
FNIEMOP_DEF_1(iemOp_fldenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fldenv m14/28byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE,           enmEffOpSize, /*=*/ pIemCpu->enmEffOpSize,  0);
    IEM_MC_ARG(uint8_t,                 iEffSeg,                                    1);
    IEM_MC_ARG(RTGCPTR,                 GCPtrEffSrc,                                2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fldenv, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
13463
13464
13465/** Opcode 0xd9 !11/5 */
13466FNIEMOP_DEF_1(iemOp_fldcw, uint8_t, bRm)
13467{
13468 IEMOP_MNEMONIC("fldcw m2byte");
13469 IEM_MC_BEGIN(1, 1);
13470 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13471 IEM_MC_ARG(uint16_t, u16Fsw, 0);
13472 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13473 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13474 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13475 IEM_MC_FETCH_MEM_U16(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
13476 IEM_MC_CALL_CIMPL_1(iemCImpl_fldcw, u16Fsw);
13477 IEM_MC_END();
13478 return VINF_SUCCESS;
13479}
13480
13481
13482/** Opcode 0xd9 !11/6 */
13483FNIEMOP_DEF_1(iemOp_fnstenv, uint8_t, bRm)
13484{
13485 IEMOP_MNEMONIC("fstenv m14/m28byte");
13486 IEM_MC_BEGIN(3, 0);
13487 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pIemCpu->enmEffOpSize, 0);
13488 IEM_MC_ARG(uint8_t, iEffSeg, 1);
13489 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
13490 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
13491 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13492 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13493 IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
13494 IEM_MC_CALL_CIMPL_3(iemCImpl_fnstenv, enmEffOpSize, iEffSeg, GCPtrEffDst);
13495 IEM_MC_END();
13496 return VINF_SUCCESS;
13497}
13498
13499
/** Opcode 0xd9 !11/7
 * FNSTCW m2byte: store the FPU control word to memory (no-wait form).
 * Simple enough to be done inline with MC statements, no C-impl needed. */
FNIEMOP_DEF_1(iemOp_fnstcw, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fnstcw m2byte");
    IEM_MC_BEGIN(2, 0);
    IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,  u16Fcw);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_FETCH_FCW(u16Fcw);
    IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Fcw);
    IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
    return VINF_SUCCESS;
}
13516
13517
/** Opcode 0xd9 0xd0, 0xd9 0xd8-0xdf, ++?.
 * FNOP: does nothing except updating the FPU opcode/IP bookkeeping; still
 * subject to #NM (CR0.EM/TS) and pending FPU exception checks. */
FNIEMOP_DEF(iemOp_fnop)
{
    IEMOP_MNEMONIC("fnop");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /** @todo Testcase: looks like FNOP leaves FOP alone but updates FPUIP. Could be
     *        intel optimizations. Investigate. */
    IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
    return VINF_SUCCESS;
}
13535
13536
/** Opcode 0xd9 11/0 stN
 * FLD STn: push a copy of STn onto the stack (STn is read before the push,
 * so FLD ST0 duplicates the top). */
FNIEMOP_DEF_1(iemOp_fld_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fld stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, bRm & X86_MODRM_RM_MASK)
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_PUSH_FPU_RESULT(FpuRes);
    IEM_MC_ELSE()
        /* Source register empty: push underflow handling. */
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW();
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
13562
13563
/** Opcode 0xd9 11/3 stN
 * FXCH STn: exchange ST0 and STn.  C1 is set via the FSW in the result;
 * the empty-register case is delegated to a C-impl. */
FNIEMOP_DEF_1(iemOp_fxch_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fxch stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(1, 3);
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value1);
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value2);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_ARG_CONST(uint8_t,           iStReg, /*=*/ bRm & X86_MODRM_RM_MASK, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        /* Both registers valid: swap them, setting C1 in the FSW. */
        IEM_MC_SET_FPU_RESULT(FpuRes, X86_FSW_C1, pr80Value2);
        IEM_MC_STORE_FPUREG_R80_SRC_REF(bRm & X86_MODRM_RM_MASK, pr80Value1);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_CALL_CIMPL_1(iemCImpl_fxch_underflow, iStReg);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
13592
13593
/** Opcode 0xd9 11/4, 0xdd 11/2.
 * FSTP STn: copy ST0 to STn and pop the stack. */
FNIEMOP_DEF_1(iemOp_fstp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fstp st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* fstp st0, st0 is frequently used as an official 'ffreep st0' sequence. */
    uint8_t const iDstReg = bRm & X86_MODRM_RM_MASK;
    if (!iDstReg)
    {
        /* Destination is ST0 itself: no copy needed, just pop. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL_CONST(uint16_t,        u16Fsw, /*=*/ 0);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();
        IEM_MC_IF_FPUREG_NOT_EMPTY(0)
            IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
        IEM_MC_ELSE()
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(0);
        IEM_MC_ENDIF();
        IEM_MC_USED_FPU();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value);
        IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();
        IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
            IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
            IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, iDstReg);
        IEM_MC_ELSE()
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(iDstReg);
        IEM_MC_ENDIF();
        IEM_MC_USED_FPU();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
13636
13637
13638/**
13639 * Common worker for FPU instructions working on ST0 and replaces it with the
13640 * result, i.e. unary operators.
13641 *
13642 * @param pfnAImpl Pointer to the instruction implementation (assembly).
13643 */
13644FNIEMOP_DEF_1(iemOpHlpFpu_st0, PFNIEMAIMPLFPUR80UNARY, pfnAImpl)
13645{
13646 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13647
13648 IEM_MC_BEGIN(2, 1);
13649 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
13650 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
13651 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
13652
13653 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13654 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13655 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
13656 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuRes, pr80Value);
13657 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
13658 IEM_MC_ELSE()
13659 IEM_MC_FPU_STACK_UNDERFLOW(0);
13660 IEM_MC_ENDIF();
13661 IEM_MC_USED_FPU();
13662 IEM_MC_ADVANCE_RIP();
13663
13664 IEM_MC_END();
13665 return VINF_SUCCESS;
13666}
13667
13668
/** Opcode 0xd9 0xe0.
 * FCHS: flip the sign of ST0. */
FNIEMOP_DEF(iemOp_fchs)
{
    IEMOP_MNEMONIC("fchs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fchs_r80);
}
13675
13676
/** Opcode 0xd9 0xe1.
 * FABS: clear the sign of ST0 (absolute value). */
FNIEMOP_DEF(iemOp_fabs)
{
    IEMOP_MNEMONIC("fabs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fabs_r80);
}
13683
13684
13685/**
13686 * Common worker for FPU instructions working on ST0 and only returns FSW.
13687 *
13688 * @param pfnAImpl Pointer to the instruction implementation (assembly).
13689 */
13690FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0, PFNIEMAIMPLFPUR80UNARYFSW, pfnAImpl)
13691{
13692 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13693
13694 IEM_MC_BEGIN(2, 1);
13695 IEM_MC_LOCAL(uint16_t, u16Fsw);
13696 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
13697 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
13698
13699 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13700 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13701 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
13702 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pu16Fsw, pr80Value);
13703 IEM_MC_UPDATE_FSW(u16Fsw);
13704 IEM_MC_ELSE()
13705 IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
13706 IEM_MC_ENDIF();
13707 IEM_MC_USED_FPU();
13708 IEM_MC_ADVANCE_RIP();
13709
13710 IEM_MC_END();
13711 return VINF_SUCCESS;
13712}
13713
13714
/** Opcode 0xd9 0xe4.
 * FTST: compare ST0 against +0.0, setting the condition codes. */
FNIEMOP_DEF(iemOp_ftst)
{
    IEMOP_MNEMONIC("ftst st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_ftst_r80);
}
13721
13722
/** Opcode 0xd9 0xe5.
 * FXAM: classify ST0 (zero/NaN/infinity/denormal/empty) into C0-C3. */
FNIEMOP_DEF(iemOp_fxam)
{
    IEMOP_MNEMONIC("fxam st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_fxam_r80);
}
13729
13730
13731/**
13732 * Common worker for FPU instructions pushing a constant onto the FPU stack.
13733 *
13734 * @param pfnAImpl Pointer to the instruction implementation (assembly).
13735 */
13736FNIEMOP_DEF_1(iemOpHlpFpuPushConstant, PFNIEMAIMPLFPUR80LDCONST, pfnAImpl)
13737{
13738 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13739
13740 IEM_MC_BEGIN(1, 1);
13741 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
13742 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
13743
13744 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13745 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13746 IEM_MC_IF_FPUREG_IS_EMPTY(7)
13747 IEM_MC_CALL_FPU_AIMPL_1(pfnAImpl, pFpuRes);
13748 IEM_MC_PUSH_FPU_RESULT(FpuRes);
13749 IEM_MC_ELSE()
13750 IEM_MC_FPU_STACK_PUSH_OVERFLOW();
13751 IEM_MC_ENDIF();
13752 IEM_MC_USED_FPU();
13753 IEM_MC_ADVANCE_RIP();
13754
13755 IEM_MC_END();
13756 return VINF_SUCCESS;
13757}
13758
13759
/** Opcode 0xd9 0xe8.
 * FLD1: push +1.0. */
FNIEMOP_DEF(iemOp_fld1)
{
    IEMOP_MNEMONIC("fld1");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fld1);
}
13766
13767
/** Opcode 0xd9 0xe9.
 * FLDL2T: push log2(10). */
FNIEMOP_DEF(iemOp_fldl2t)
{
    IEMOP_MNEMONIC("fldl2t");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2t);
}
13774
13775
/** Opcode 0xd9 0xea.
 * FLDL2E: push log2(e). */
FNIEMOP_DEF(iemOp_fldl2e)
{
    IEMOP_MNEMONIC("fldl2e");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2e);
}
13782
/** Opcode 0xd9 0xeb.
 * FLDPI: push pi. */
FNIEMOP_DEF(iemOp_fldpi)
{
    IEMOP_MNEMONIC("fldpi");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldpi);
}
13789
13790
/** Opcode 0xd9 0xec.
 * FLDLG2: push log10(2). */
FNIEMOP_DEF(iemOp_fldlg2)
{
    IEMOP_MNEMONIC("fldlg2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldlg2);
}
13797
/** Opcode 0xd9 0xed.
 * FLDLN2: push ln(2). */
FNIEMOP_DEF(iemOp_fldln2)
{
    IEMOP_MNEMONIC("fldln2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldln2);
}
13804
13805
/** Opcode 0xd9 0xee.
 * FLDZ: push +0.0. */
FNIEMOP_DEF(iemOp_fldz)
{
    IEMOP_MNEMONIC("fldz");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldz);
}
13812
13813
/** Opcode 0xd9 0xf0.
 * F2XM1: ST0 = 2^ST0 - 1. */
FNIEMOP_DEF(iemOp_f2xm1)
{
    IEMOP_MNEMONIC("f2xm1 st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_f2xm1_r80);
}
13820
13821
13822/** Opcode 0xd9 0xf1. */
13823FNIEMOP_DEF(iemOp_fylx2)
13824{
13825 IEMOP_MNEMONIC("fylx2 st0");
13826 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fyl2x_r80);
13827}
13828
13829
13830/**
13831 * Common worker for FPU instructions working on ST0 and having two outputs, one
13832 * replacing ST0 and one pushed onto the stack.
13833 *
13834 * @param pfnAImpl Pointer to the instruction implementation (assembly).
13835 */
13836FNIEMOP_DEF_1(iemOpHlpFpuReplace_st0_push, PFNIEMAIMPLFPUR80UNARYTWO, pfnAImpl)
13837{
13838 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13839
13840 IEM_MC_BEGIN(2, 1);
13841 IEM_MC_LOCAL(IEMFPURESULTTWO, FpuResTwo);
13842 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULTTWO, pFpuResTwo, FpuResTwo, 0);
13843 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
13844
13845 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13846 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13847 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
13848 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuResTwo, pr80Value);
13849 IEM_MC_PUSH_FPU_RESULT_TWO(FpuResTwo);
13850 IEM_MC_ELSE()
13851 IEM_MC_FPU_STACK_PUSH_UNDERFLOW_TWO();
13852 IEM_MC_ENDIF();
13853 IEM_MC_USED_FPU();
13854 IEM_MC_ADVANCE_RIP();
13855
13856 IEM_MC_END();
13857 return VINF_SUCCESS;
13858}
13859
13860
/** Opcode 0xd9 0xf2.
 * FPTAN: ST0 = tan(ST0), then push 1.0. */
FNIEMOP_DEF(iemOp_fptan)
{
    IEMOP_MNEMONIC("fptan st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fptan_r80_r80);
}
13867
13868
13869/**
13870 * Common worker for FPU instructions working on STn and ST0, storing the result
13871 * in STn, and popping the stack unless IE, DE or ZE was raised.
13872 *
13873 * @param pfnAImpl Pointer to the instruction implementation (assembly).
13874 */
13875FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
13876{
13877 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13878
13879 IEM_MC_BEGIN(3, 1);
13880 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
13881 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
13882 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
13883 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
13884
13885 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13886 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13887
13888 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
13889 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
13890 IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, bRm & X86_MODRM_RM_MASK);
13891 IEM_MC_ELSE()
13892 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(bRm & X86_MODRM_RM_MASK);
13893 IEM_MC_ENDIF();
13894 IEM_MC_USED_FPU();
13895 IEM_MC_ADVANCE_RIP();
13896
13897 IEM_MC_END();
13898 return VINF_SUCCESS;
13899}
13900
13901
/** Opcode 0xd9 0xf3. */
FNIEMOP_DEF(iemOp_fpatan)
{
    IEMOP_MNEMONIC("fpatan st1,st0");
    /* Result goes to ST1 (the '1' argument) and the stack is popped. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fpatan_r80_by_r80);
}
13908
13909
/** Opcode 0xd9 0xf4. */
FNIEMOP_DEF(iemOp_fxtract)
{
    IEMOP_MNEMONIC("fxtract st0");
    /* Two-output worker: exponent replaces ST0, significand is pushed. */
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fxtract_r80_r80);
}
13916
13917
/** Opcode 0xd9 0xf5. */
FNIEMOP_DEF(iemOp_fprem1)
{
    IEMOP_MNEMONIC("fprem1 st0, st1");
    /* IEEE partial remainder of ST0 by ST1; result stored in ST0. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem1_r80_by_r80);
}
13924
13925
/** Opcode 0xd9 0xf6. */
FNIEMOP_DEF(iemOp_fdecstp)
{
    IEMOP_MNEMONIC("fdecstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Only the TOP field moves; no register content or tag validity check. */
    IEM_MC_FPU_STACK_DEC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0);

    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
13948
13949
/** Opcode 0xd9 0xf7. */
FNIEMOP_DEF(iemOp_fincstp)
{
    IEMOP_MNEMONIC("fincstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Only the TOP field moves; no register content or tag validity check. */
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0);

    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
13972
13973
/** Opcode 0xd9 0xf8. */
FNIEMOP_DEF(iemOp_fprem)
{
    IEMOP_MNEMONIC("fprem st0, st1");
    /* 8087-style partial remainder of ST0 by ST1; result stored in ST0. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem_r80_by_r80);
}
13980
13981
/** Opcode 0xd9 0xf9. */
FNIEMOP_DEF(iemOp_fyl2xp1)
{
    IEMOP_MNEMONIC("fyl2xp1 st1,st0");
    /* Result goes to ST1 (the '1' argument) and the stack is popped. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2xp1_r80_by_r80);
}
13988
13989
/** Opcode 0xd9 0xfa. */
FNIEMOP_DEF(iemOp_fsqrt)
{
    IEMOP_MNEMONIC("fsqrt st0");
    /* Unary ST0 worker; result replaces ST0. */
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsqrt_r80);
}
13996
13997
/** Opcode 0xd9 0xfb. */
FNIEMOP_DEF(iemOp_fsincos)
{
    IEMOP_MNEMONIC("fsincos st0");
    /* Two-output worker: sine replaces ST0 and cosine is pushed. */
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fsincos_r80_r80);
}
14004
14005
/** Opcode 0xd9 0xfc. */
FNIEMOP_DEF(iemOp_frndint)
{
    IEMOP_MNEMONIC("frndint st0");
    /* Unary ST0 worker; result replaces ST0. */
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_frndint_r80);
}
14012
14013
/** Opcode 0xd9 0xfd. */
FNIEMOP_DEF(iemOp_fscale)
{
    IEMOP_MNEMONIC("fscale st0, st1");
    /* Scales ST0 by ST1; result stored in ST0. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fscale_r80_by_r80);
}
14020
14021
/** Opcode 0xd9 0xfe. */
FNIEMOP_DEF(iemOp_fsin)
{
    IEMOP_MNEMONIC("fsin st0");
    /* Unary ST0 worker; result replaces ST0. */
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsin_r80);
}
14028
14029
/** Opcode 0xd9 0xff. */
FNIEMOP_DEF(iemOp_fcos)
{
    IEMOP_MNEMONIC("fcos st0");
    /* Unary ST0 worker; result replaces ST0. */
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fcos_r80);
}
14036
14037
/**
 * Dispatch table for the 0xd9 register forms with modrm bytes 0xe0 thru 0xff,
 * indexed by (bRm - 0xe0).  Used by iemOp_EscF1.
 *
 * NOTE(review): the 0xf1 entry 'iemOp_fylx2' looks like a transposed spelling
 * of FYL2X (the worker it dispatches is iemAImpl_fyl2x_r80); the function is
 * defined under that name elsewhere in this file, so renaming it must be done
 * there and here together.
 */
static const PFNIEMOP g_apfnEscF1_E0toFF[32] =
{
    /* 0xe0 */ iemOp_fchs,
    /* 0xe1 */ iemOp_fabs,
    /* 0xe2 */ iemOp_Invalid,
    /* 0xe3 */ iemOp_Invalid,
    /* 0xe4 */ iemOp_ftst,
    /* 0xe5 */ iemOp_fxam,
    /* 0xe6 */ iemOp_Invalid,
    /* 0xe7 */ iemOp_Invalid,
    /* 0xe8 */ iemOp_fld1,
    /* 0xe9 */ iemOp_fldl2t,
    /* 0xea */ iemOp_fldl2e,
    /* 0xeb */ iemOp_fldpi,
    /* 0xec */ iemOp_fldlg2,
    /* 0xed */ iemOp_fldln2,
    /* 0xee */ iemOp_fldz,
    /* 0xef */ iemOp_Invalid,
    /* 0xf0 */ iemOp_f2xm1,
    /* 0xf1 */ iemOp_fylx2,
    /* 0xf2 */ iemOp_fptan,
    /* 0xf3 */ iemOp_fpatan,
    /* 0xf4 */ iemOp_fxtract,
    /* 0xf5 */ iemOp_fprem1,
    /* 0xf6 */ iemOp_fdecstp,
    /* 0xf7 */ iemOp_fincstp,
    /* 0xf8 */ iemOp_fprem,
    /* 0xf9 */ iemOp_fyl2xp1,
    /* 0xfa */ iemOp_fsqrt,
    /* 0xfb */ iemOp_fsincos,
    /* 0xfc */ iemOp_frndint,
    /* 0xfd */ iemOp_fscale,
    /* 0xfe */ iemOp_fsin,
    /* 0xff */ iemOp_fcos
};
14074
14075
/**
 * Opcode 0xd9 escape byte: decodes the modrm byte and dispatches to the
 * register form (mod == 3) or the memory form (mod != 3) handlers.
 */
FNIEMOP_DEF(iemOp_EscF1)
{
    /* Record the FPU opcode offset (one byte back: the escape byte itself). */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register forms, dispatched on the reg field (and bRm for sub-cases). */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm);
            case 2:
                if (bRm == 0xd0)
                    return FNIEMOP_CALL(iemOp_fnop);
                return IEMOP_RAISE_INVALID_OPCODE();
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved. Intel behavior seems to be FSTP ST(i) though. */
            case 4:
            case 5:
            case 6:
            case 7:
                /* 0xe0-0xff: table-driven dispatch of the no-operand forms. */
                Assert((unsigned)bRm - 0xe0U < RT_ELEMENTS(g_apfnEscF1_E0toFF));
                return FNIEMOP_CALL(g_apfnEscF1_E0toFF[bRm - 0xe0]);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory forms, dispatched on the reg field. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m32r, bRm);
            case 1: return IEMOP_RAISE_INVALID_OPCODE();
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m32r, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fldenv, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fldcw, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fnstenv, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstcw, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
14117
14118
/** Opcode 0xda 11/0.
 * FCMOVB ST0,ST(i): copy ST(i) to ST0 when EFLAGS.CF is set. */
FNIEMOP_DEF_1(iemOp_fcmovb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Both ST(i) and ST0 must be non-empty, else stack underflow on ST0. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14145
14146
/** Opcode 0xda 11/1.
 * FCMOVE ST0,ST(i): copy ST(i) to ST0 when EFLAGS.ZF is set. */
FNIEMOP_DEF_1(iemOp_fcmove_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmove st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Both ST(i) and ST0 must be non-empty, else stack underflow on ST0. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14173
14174
/** Opcode 0xda 11/2.
 * FCMOVBE ST0,ST(i): copy ST(i) to ST0 when EFLAGS.CF or EFLAGS.ZF is set. */
FNIEMOP_DEF_1(iemOp_fcmovbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Both ST(i) and ST0 must be non-empty, else stack underflow on ST0. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14201
14202
/** Opcode 0xda 11/3.
 * FCMOVU ST0,ST(i): copy ST(i) to ST0 when EFLAGS.PF is set (unordered). */
FNIEMOP_DEF_1(iemOp_fcmovu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovu st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Both ST(i) and ST0 must be non-empty, else stack underflow on ST0. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14229
14230
/**
 * Common worker for FPU instructions working on ST0 and STn, only affecting
 * flags, and popping twice when done.
 *
 * Used by FUCOMPP: compares ST0 against ST1 (registers 0 and 1 are hard-coded
 * below), merges the resulting FSW and pops the stack twice.
 *
 * @param pfnAImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, 1)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW_THEN_POP_POP(u16Fsw);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP_POP();
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14261
14262
/** Opcode 0xda 0xe9. */
FNIEMOP_DEF(iemOp_fucompp)
{
    IEMOP_MNEMONIC("fucompp st0,stN");
    /* Unordered compare of ST0 with ST1, then pop twice. */
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fucom_r80_by_r80);
}
14269
14270
/**
 * Common worker for FPU instructions working on ST0 and an m32i, and storing
 * the result in ST0.
 *
 * Used by FIADD/FIMUL/FISUB/FISUBR/FIDIV/FIDIVR with a 32-bit integer memory
 * operand.  The memory fetch happens before the ST0-empty check, so a #PF/#GP
 * on the operand takes precedence over FPU stack underflow.
 *
 * @param bRm      The modrm byte (encodes the memory operand).
 * @param pfnAImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32i, uint8_t, bRm, PFNIEMAIMPLFPUI32, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi32Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14306
14307
/** Opcode 0xda !11/0.
 * FIADD m32i: ST0 += (signed 32-bit integer operand). */
FNIEMOP_DEF_1(iemOp_fiadd_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fiadd m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fiadd_r80_by_i32);
}
14314
14315
/** Opcode 0xda !11/1.
 * FIMUL m32i: ST0 *= (signed 32-bit integer operand). */
FNIEMOP_DEF_1(iemOp_fimul_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fimul m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fimul_r80_by_i32);
}
14322
14323
/** Opcode 0xda !11/2.
 * FICOM m32i: compare ST0 with a signed 32-bit integer operand, setting the
 * FSW condition codes; no pop, no result store. */
FNIEMOP_DEF_1(iemOp_ficom_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ficom st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        /* Also records FDP/FDS for the memory operand. */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14356
14357
/** Opcode 0xda !11/3.
 * FICOMP m32i: like FICOM m32i but pops the register stack afterwards. */
FNIEMOP_DEF_1(iemOp_ficomp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ficomp st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14390
14391
/** Opcode 0xda !11/4.
 * FISUB m32i: ST0 -= (signed 32-bit integer operand). */
FNIEMOP_DEF_1(iemOp_fisub_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisub m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisub_r80_by_i32);
}
14398
14399
/** Opcode 0xda !11/5.
 * FISUBR m32i: ST0 = operand - ST0 (reversed subtraction). */
FNIEMOP_DEF_1(iemOp_fisubr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisubr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisubr_r80_by_i32);
}
14406
14407
/** Opcode 0xda !11/6.
 * FIDIV m32i: ST0 /= (signed 32-bit integer operand). */
FNIEMOP_DEF_1(iemOp_fidiv_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fidiv m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidiv_r80_by_i32);
}
14414
14415
/** Opcode 0xda !11/7.
 * FIDIVR m32i: ST0 = operand / ST0 (reversed division). */
FNIEMOP_DEF_1(iemOp_fidivr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fidivr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidivr_r80_by_i32);
}
14422
14423
/**
 * Opcode 0xda escape byte: FCMOVcc register forms and 32-bit integer memory
 * forms.  Decodes the modrm byte and dispatches accordingly.
 */
FNIEMOP_DEF(iemOp_EscF2)
{
    /* Record the FPU opcode offset (one byte back: the escape byte itself). */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register forms. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovb_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmove_stN, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovu_stN, bRm);
            case 4: return IEMOP_RAISE_INVALID_OPCODE();
            case 5:
                /* Only 0xe9 (FUCOMPP) is defined in this group. */
                if (bRm == 0xe9)
                    return FNIEMOP_CALL(iemOp_fucompp);
                return IEMOP_RAISE_INVALID_OPCODE();
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory forms: integer arithmetic/compare with an m32i operand. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m32i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m32i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m32i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m32i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m32i, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m32i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m32i, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m32i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
14463
14464
/** Opcode 0xdb !11/0.
 * FILD m32i: convert a signed 32-bit integer memory operand to R80 and push
 * it.  If ST7 (the next push slot) is occupied, push-overflow handling runs. */
FNIEMOP_DEF_1(iemOp_fild_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fild m32i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int32_t, i32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val, i32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i32_to_r80, pFpuRes, pi32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14496
14497
/** Opcode 0xdb !11/1.
 * FISTTP m32i: store ST0 as a signed 32-bit integer using truncation
 * (chop, regardless of FCW.RC) and pop.  On empty ST0 with FCW.IM masked,
 * the integer indefinite value (INT32_MIN) is written instead. */
FNIEMOP_DEF_1(iemOp_fisttp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisttp m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination writable up front so access faults precede the store. */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14532
14533
/** Opcode 0xdb !11/2.
 * FIST m32i: store ST0 as a signed 32-bit integer (rounded per FCW.RC),
 * without popping.  On empty ST0 with FCW.IM masked, the integer indefinite
 * value (INT32_MIN) is written instead. */
FNIEMOP_DEF_1(iemOp_fist_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fist m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination writable up front so access faults precede the store. */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14568
14569
14570/** Opcode 0xdb !11/3. */
14571FNIEMOP_DEF_1(iemOp_fistp_m32i, uint8_t, bRm)
14572{
14573 IEMOP_MNEMONIC("fisttp m32i");
14574 IEM_MC_BEGIN(3, 2);
14575 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14576 IEM_MC_LOCAL(uint16_t, u16Fsw);
14577 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14578 IEM_MC_ARG(int32_t *, pi32Dst, 1);
14579 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
14580
14581 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
14582 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14583 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14584 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14585
14586 IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
14587 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
14588 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
14589 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
14590 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
14591 IEM_MC_ELSE()
14592 IEM_MC_IF_FCW_IM()
14593 IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
14594 IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
14595 IEM_MC_ENDIF();
14596 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
14597 IEM_MC_ENDIF();
14598 IEM_MC_USED_FPU();
14599 IEM_MC_ADVANCE_RIP();
14600
14601 IEM_MC_END();
14602 return VINF_SUCCESS;
14603}
14604
14605
/** Opcode 0xdb !11/5.
 * FLD m80r: push an 80-bit real memory operand onto the register stack.
 * Push-overflow handling runs if ST7 (the next push slot) is occupied. */
FNIEMOP_DEF_1(iemOp_fld_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fld m80r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT80U, r80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT80U, pr80Val, r80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R80(r80Val, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r80, pFpuRes, pr80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14637
14638
/** Opcode 0xdb !11/7.
 * FSTP m80r: store ST0 as an 80-bit real to memory and pop.  On empty ST0
 * with FCW.IM masked, a negative QNaN (real indefinite) is written instead. */
FNIEMOP_DEF_1(iemOp_fstp_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fstp m80r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT80U, pr80Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination writable up front so access faults precede the store. */
    IEM_MC_MEM_MAP(pr80Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r80, pu16Fsw, pr80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr80Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(pr80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr80Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14673
14674
/** Opcode 0xdb 11/0.
 * FCMOVNB ST0,ST(i): copy ST(i) to ST0 when EFLAGS.CF is clear. */
FNIEMOP_DEF_1(iemOp_fcmovnb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovnb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Both ST(i) and ST0 must be non-empty, else stack underflow on ST0. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14701
14702
/** Opcode 0xdb 11/1.
 * FCMOVNE ST0,ST(i): copy ST(i) to ST0 when EFLAGS.ZF is clear. */
FNIEMOP_DEF_1(iemOp_fcmovne_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovne st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Both ST(i) and ST0 must be non-empty, else stack underflow on ST0. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14729
14730
/** Opcode 0xdb 11/2.
 * FCMOVNBE ST0,ST(i): copy ST(i) to ST0 when both EFLAGS.CF and EFLAGS.ZF
 * are clear. */
FNIEMOP_DEF_1(iemOp_fcmovnbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovnbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Both ST(i) and ST0 must be non-empty, else stack underflow on ST0. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14757
14758
14759/** Opcode 0xdb 11/3. */
14760FNIEMOP_DEF_1(iemOp_fcmovnnu_stN, uint8_t, bRm)
14761{
14762 IEMOP_MNEMONIC("fcmovnnu st0,stN");
14763 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14764
14765 IEM_MC_BEGIN(0, 1);
14766 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
14767
14768 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14769 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14770
14771 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
14772 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF)
14773 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
14774 IEM_MC_ENDIF();
14775 IEM_MC_UPDATE_FPU_OPCODE_IP();
14776 IEM_MC_ELSE()
14777 IEM_MC_FPU_STACK_UNDERFLOW(0);
14778 IEM_MC_ENDIF();
14779 IEM_MC_USED_FPU();
14780 IEM_MC_ADVANCE_RIP();
14781
14782 IEM_MC_END();
14783 return VINF_SUCCESS;
14784}
14785
14786
/** Opcode 0xdb 0xe0.
 * FNENI: 8087 interrupt-enable instruction; ignored (no-op) here apart from
 * the device-not-available check. */
FNIEMOP_DEF(iemOp_fneni)
{
    IEMOP_MNEMONIC("fneni (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
14798
14799
/** Opcode 0xdb 0xe1.
 * FNDISI: 8087 interrupt-disable instruction; ignored (no-op) here apart from
 * the device-not-available check. */
FNIEMOP_DEF(iemOp_fndisi)
{
    IEMOP_MNEMONIC("fndisi (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
14811
14812
/** Opcode 0xdb 0xe2.
 * FNCLEX: clear the FPU exception flags in FSW without checking for pending
 * exceptions first (the no-wait form). */
FNIEMOP_DEF(iemOp_fnclex)
{
    IEMOP_MNEMONIC("fnclex");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_CLEAR_FSW_EX();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
14826
14827
/** Opcode 0xdb 0xe3.
 * FNINIT: reinitialize the FPU; deferred to the C implementation with
 * fCheckXcpts=false (the no-wait form does not check pending exceptions). */
FNIEMOP_DEF(iemOp_fninit)
{
    IEMOP_MNEMONIC("fninit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_finit, false /*fCheckXcpts*/);
}
14835
14836
/** Opcode 0xdb 0xe4.
 * FNSETPM: 80287 "set protected mode"; ignored (no-op) here apart from the
 * device-not-available check. */
FNIEMOP_DEF(iemOp_fnsetpm)
{
    IEMOP_MNEMONIC("fnsetpm (80287/ign)");   /* set protected mode on fpu. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
14848
14849
/** Opcode 0xdb 0xe5.
 * FRSTPM: 80287XL "reset protected mode"; raises #UD here since newer CPUs do
 * (the legacy ignore-as-nop variant is kept disabled below). */
FNIEMOP_DEF(iemOp_frstpm)
{
    IEMOP_MNEMONIC("frstpm (80287XL/ign)");   /* reset pm, back to real mode. */
#if 0 /* #UDs on newer CPUs */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
#else
    return IEMOP_RAISE_INVALID_OPCODE();
#endif
}
14865
14866
/** Opcode 0xdb 11/5.
 * FUCOMI ST0,ST(i): unordered compare setting EFLAGS; no pop (fPop=false). */
FNIEMOP_DEF_1(iemOp_fucomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fucomi st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fucomi_r80_by_r80, false /*fPop*/);
}
14873
14874
/** Opcode 0xdb 11/6.
 * FCOMI ST0,ST(i): ordered compare setting EFLAGS; no pop (fPop=false). */
FNIEMOP_DEF_1(iemOp_fcomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcomi st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, false /*fPop*/);
}
14881
14882
/**
 * Opcode 0xdb escape byte: FCMOVNcc/FCOMI/FUCOMI register forms, the FNxxx
 * control instructions (reg field 4) and m32i/m80r memory forms.
 */
FNIEMOP_DEF(iemOp_EscF3)
{
    /* Record the FPU opcode offset (one byte back: the escape byte itself). */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register forms. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovnb_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmovne_stN, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovnbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovnnu_stN, bRm);
            case 4:
                /* 0xe0-0xe7: the FNxxx control instruction group. */
                switch (bRm)
                {
                    case 0xe0: return FNIEMOP_CALL(iemOp_fneni);
                    case 0xe1: return FNIEMOP_CALL(iemOp_fndisi);
                    case 0xe2: return FNIEMOP_CALL(iemOp_fnclex);
                    case 0xe3: return FNIEMOP_CALL(iemOp_fninit);
                    case 0xe4: return FNIEMOP_CALL(iemOp_fnsetpm);
                    case 0xe5: return FNIEMOP_CALL(iemOp_frstpm);
                    case 0xe6: return IEMOP_RAISE_INVALID_OPCODE();
                    case 0xe7: return IEMOP_RAISE_INVALID_OPCODE();
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reached in practice: every inner case returns. */
            case 5: return FNIEMOP_CALL_1(iemOp_fucomi_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fcomi_stN, bRm);
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory forms. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fild_m32i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m32i,bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fist_m32i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fistp_m32i, bRm);
            case 4: return IEMOP_RAISE_INVALID_OPCODE();
            case 5: return FNIEMOP_CALL_1(iemOp_fld_m80r, bRm);
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return FNIEMOP_CALL_1(iemOp_fstp_m80r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
14932
14933
/**
 * Common worker for FPU instructions working on STn and ST0, and storing the
 * result in STn unless IE, DE or ZE was raised.
 *
 * @param   bRm         The ModR/M byte (register form; low bits select ST(i)).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Both ST(i) and ST0 must be valid; otherwise flag a stack underflow on ST(i). */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14965
14966
/** Opcode 0xdc 11/0.  FADD ST(i),ST0: ST(i) = ST(i) + ST0. */
FNIEMOP_DEF_1(iemOp_fadd_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fadd stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fadd_r80_by_r80);
}
14973
14974
/** Opcode 0xdc 11/1.  FMUL ST(i),ST0: ST(i) = ST(i) * ST0. */
FNIEMOP_DEF_1(iemOp_fmul_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fmul stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fmul_r80_by_r80);
}
14981
14982
/** Opcode 0xdc 11/4.  FSUBR ST(i),ST0: ST(i) = ST0 - ST(i). */
FNIEMOP_DEF_1(iemOp_fsubr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsubr_r80_by_r80);
}
14989
14990
/** Opcode 0xdc 11/5.  FSUB ST(i),ST0: ST(i) = ST(i) - ST0. */
FNIEMOP_DEF_1(iemOp_fsub_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsub stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsub_r80_by_r80);
}
14997
14998
/** Opcode 0xdc 11/6.  FDIVR ST(i),ST0: ST(i) = ST0 / ST(i). */
FNIEMOP_DEF_1(iemOp_fdivr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdivr_r80_by_r80);
}
15005
15006
/** Opcode 0xdc 11/7.  FDIV ST(i),ST0: ST(i) = ST(i) / ST0. */
FNIEMOP_DEF_1(iemOp_fdiv_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdiv stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdiv_r80_by_r80);
}
15013
15014
15015/**
15016 * Common worker for FPU instructions working on ST0 and a 64-bit floating point
15017 * memory operand, and storing the result in ST0.
15018 *
15019 * @param pfnAImpl Pointer to the instruction implementation (assembly).
15020 */
15021FNIEMOP_DEF_2(iemOpHlpFpu_ST0_m64r, uint8_t, bRm, PFNIEMAIMPLFPUR64, pfnImpl)
15022{
15023 IEM_MC_BEGIN(3, 3);
15024 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15025 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
15026 IEM_MC_LOCAL(RTFLOAT64U, r64Factor2);
15027 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
15028 IEM_MC_ARG(PCRTFLOAT80U, pr80Factor1, 1);
15029 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Factor2, r64Factor2, 2);
15030
15031 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15032 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15033 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15034 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15035
15036 IEM_MC_FETCH_MEM_R64(r64Factor2, pIemCpu->iEffSeg, GCPtrEffSrc);
15037 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Factor1, 0)
15038 IEM_MC_CALL_FPU_AIMPL_3(pfnImpl, pFpuRes, pr80Factor1, pr64Factor2);
15039 IEM_MC_STORE_FPU_RESULT_MEM_OP(FpuRes, 0, pIemCpu->iEffSeg, GCPtrEffSrc);
15040 IEM_MC_ELSE()
15041 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(0, pIemCpu->iEffSeg, GCPtrEffSrc);
15042 IEM_MC_ENDIF();
15043 IEM_MC_USED_FPU();
15044 IEM_MC_ADVANCE_RIP();
15045
15046 IEM_MC_END();
15047 return VINF_SUCCESS;
15048}
15049
15050
/** Opcode 0xdc !11/0.  FADD ST0,m64real. */
FNIEMOP_DEF_1(iemOp_fadd_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fadd m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fadd_r80_by_r64);
}
15057
15058
/** Opcode 0xdc !11/1.  FMUL ST0,m64real. */
FNIEMOP_DEF_1(iemOp_fmul_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fmul m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fmul_r80_by_r64);
}
15065
15066
/** Opcode 0xdc !11/2.
 * FCOM ST0,m64real: compare ST0 with a 64-bit real memory operand, updating
 * only FSW (C0/C2/C3); no register is written and nothing is popped. */
FNIEMOP_DEF_1(iemOp_fcom_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcom st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        /* Empty ST0: stack underflow; UINT8_MAX = no destination register. */
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15099
15100
/** Opcode 0xdc !11/3.
 * FCOMP ST0,m64real: like FCOM m64real but pops the register stack afterwards
 * (note the *_THEN_POP FSW/underflow variants below). */
FNIEMOP_DEF_1(iemOp_fcomp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcomp st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15133
15134
/** Opcode 0xdc !11/4.  FSUB ST0,m64real: ST0 = ST0 - m64. */
FNIEMOP_DEF_1(iemOp_fsub_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsub m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsub_r80_by_r64);
}
15141
15142
/** Opcode 0xdc !11/5.  FSUBR ST0,m64real: ST0 = m64 - ST0. */
FNIEMOP_DEF_1(iemOp_fsubr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsubr_r80_by_r64);
}
15149
15150
/** Opcode 0xdc !11/6.  FDIV ST0,m64real: ST0 = ST0 / m64. */
FNIEMOP_DEF_1(iemOp_fdiv_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdiv m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdiv_r80_by_r64);
}
15157
15158
/** Opcode 0xdc !11/7.  FDIVR ST0,m64real: ST0 = m64 / ST0. */
FNIEMOP_DEF_1(iemOp_fdivr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdivr_r80_by_r64);
}
15165
15166
/** Opcode 0xdc.
 * Escape byte 0xdc dispatcher: register forms operate ST(i),ST0; memory forms
 * take a 64-bit real operand against ST0. */
FNIEMOP_DEF(iemOp_EscF4)
{
    /* Record the FPU opcode byte offset (opcode minus the escape byte). */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN_st0,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN_st0,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN,      bRm); /* Marked reserved, intel behavior is that of FCOM ST(i). */
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN,     bRm); /* Marked reserved, intel behavior is that of FCOMP ST(i). */
            case 4: return FNIEMOP_CALL_1(iemOp_fsubr_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsub_stN_st0,  bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivr_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdiv_stN_st0,  bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m64r,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m64r,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m64r,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m64r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m64r,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m64r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m64r,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m64r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
15203
15204
/** Opcode 0xdd !11/0.
 * FLD m64real: convert a 64-bit real memory operand to 80-bit and push it.
 * @sa iemOp_fld_m32r */
FNIEMOP_DEF_1(iemOp_fld_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fld m64r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val, r64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Val, pIemCpu->iEffSeg, GCPtrEffSrc);
    /* The push target is ST7 relative to the current top; it must be empty. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r64_to_r80, pFpuRes, pr64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15236
15237
/** Opcode 0xdd !11/1 (header previously mislabeled !11/0).
 * FISTTP m64i: store ST0 as a 64-bit integer using truncation, then pop. */
FNIEMOP_DEF_1(iemOp_fisttp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisttp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing so the commit is conditional on FSW. */
    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST0: if IM is masked, store the integer indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15272
15273
/** Opcode 0xdd !11/2 (header previously mislabeled !11/0).
 * FST m64real: store ST0 as a 64-bit real; stack is not popped. */
FNIEMOP_DEF_1(iemOp_fst_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fst m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST0: if IM is masked, store the negative QNaN (real indefinite). */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15308
15309
15310
15311
/** Opcode 0xdd !11/3 (header previously mislabeled !11/0).
 * FSTP m64real: like FST m64real but pops the register stack afterwards. */
FNIEMOP_DEF_1(iemOp_fstp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fstp m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST0: if IM is masked, store the negative QNaN (real indefinite). */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15346
15347
/** Opcode 0xdd !11/4 (header previously mislabeled !11/0).
 * FRSTOR m94/108byte: restore the full FPU state; deferred to the C
 * implementation since it touches many registers. */
FNIEMOP_DEF_1(iemOp_frstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC("frstor m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pIemCpu->enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_frstor, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
15364
15365
/** Opcode 0xdd !11/6 (header previously mislabeled !11/0).
 * FNSAVE m94/108byte: save the full FPU state and reinitialize; deferred to
 * the C implementation. */
FNIEMOP_DEF_1(iemOp_fnsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fnsave m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pIemCpu->enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fnsave, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;

}
15383
/** Opcode 0xdd !11/7 (header previously mislabeled !11/0).
 * FNSTSW m16: store the FPU status word to memory without checking for
 * pending FPU exceptions. */
FNIEMOP_DEF_1(iemOp_fnstsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fnstsw m16");

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();

    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Tmp);
    IEM_MC_ADVANCE_RIP();

/** @todo Debug / drop a hint to the verifier that things may differ
 * from REM. Seen 0x4020 (iem) vs 0x4000 (rem) at 0008:801c6b88 booting
 * NT4SP1. (X86_FSW_PE) */
    IEM_MC_END();
    return VINF_SUCCESS;
}
15407
15408
/** Opcode 0xdd 11/0.
 * FFREE ST(i): mark the selected register empty without popping. */
FNIEMOP_DEF_1(iemOp_ffree_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ffree stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C1, C2 and C3 are documented as undefined, we leave the
             unmodified. */

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
15430
15431
/** Opcode 0xdd 11/2 (header previously mislabeled 11/1; DD /2 is FST ST(i)).
 * FST ST(i): copy ST0 into ST(i) without popping. */
FNIEMOP_DEF_1(iemOp_fst_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fst st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        /* Wrap ST0 in a result with a zero FSW delta and store it in ST(i). */
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
15454
15455
15456/** Opcode 0xdd 11/3. */
15457FNIEMOP_DEF_1(iemOp_fucom_stN_st0, uint8_t, bRm)
15458{
15459 IEMOP_MNEMONIC("fcom st0,stN");
15460 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fucom_r80_by_r80);
15461}
15462
15463
15464/** Opcode 0xdd 11/4. */
15465FNIEMOP_DEF_1(iemOp_fucomp_stN, uint8_t, bRm)
15466{
15467 IEMOP_MNEMONIC("fcomp st0,stN");
15468 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fucom_r80_by_r80);
15469}
15470
15471
/** Opcode 0xdd.
 * Escape byte 0xdd dispatcher: register forms are FFREE/FST/FSTP/FUCOM(P);
 * memory forms are 64-bit real load/store, FISTTP m64i and the state
 * save/restore instructions. */
FNIEMOP_DEF(iemOp_EscF5)
{
    /* Record the FPU opcode byte offset (opcode minus the escape byte). */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffree_stN,   bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN,    bRm); /* Reserved, intel behavior is that of XCHG ST(i). */
            case 2: return FNIEMOP_CALL_1(iemOp_fst_stN,     bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN,    bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fucom_stN_st0,bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fucomp_stN,  bRm);
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m64r,    bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m64i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m64r,    bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m64r,   bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_frstor,      bRm);
            case 5: return IEMOP_RAISE_INVALID_OPCODE();
            case 6: return FNIEMOP_CALL_1(iemOp_fnsave,      bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstsw,      bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
15508
15509
/** Opcode 0xde 11/0.  FADDP ST(i),ST0: ST(i) = ST(i) + ST0, then pop. */
FNIEMOP_DEF_1(iemOp_faddp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("faddp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fadd_r80_by_r80);
}
15516
15517
/** Opcode 0xde 11/1 (header previously mislabeled 11/0).
 * FMULP ST(i),ST0: ST(i) = ST(i) * ST0, then pop. */
FNIEMOP_DEF_1(iemOp_fmulp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fmulp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fmul_r80_by_r80);
}
15524
15525
15526/** Opcode 0xde 0xd9. */
15527FNIEMOP_DEF(iemOp_fcompp)
15528{
15529 IEMOP_MNEMONIC("fucompp st0,stN");
15530 return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fcom_r80_by_r80);
15531}
15532
15533
/** Opcode 0xde 11/4.  FSUBRP ST(i),ST0: ST(i) = ST0 - ST(i), then pop. */
FNIEMOP_DEF_1(iemOp_fsubrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsubr_r80_by_r80);
}
15540
15541
/** Opcode 0xde 11/5.  FSUBP ST(i),ST0: ST(i) = ST(i) - ST0, then pop. */
FNIEMOP_DEF_1(iemOp_fsubp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsub_r80_by_r80);
}
15548
15549
/** Opcode 0xde 11/6.  FDIVRP ST(i),ST0: ST(i) = ST0 / ST(i), then pop. */
FNIEMOP_DEF_1(iemOp_fdivrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdivr_r80_by_r80);
}
15556
15557
/** Opcode 0xde 11/7.  FDIVP ST(i),ST0: ST(i) = ST(i) / ST0, then pop. */
FNIEMOP_DEF_1(iemOp_fdivp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdiv_r80_by_r80);
}
15564
15565
/**
 * Common worker for FPU instructions working on ST0 and an m16i, and storing
 * the result in ST0.
 *
 * @param   bRm         The ModR/M byte (memory form; selects the operand address).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m16i, uint8_t, bRm, PFNIEMAIMPLFPUI16, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* ST0 must be valid; otherwise flag a stack underflow on ST0. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi16Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15601
15602
/** Opcode 0xde !11/0.  FIADD m16i: ST0 = ST0 + (int16). */
FNIEMOP_DEF_1(iemOp_fiadd_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fiadd m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fiadd_r80_by_i16);
}
15609
15610
/** Opcode 0xde !11/1.  FIMUL m16i: ST0 = ST0 * (int16). */
FNIEMOP_DEF_1(iemOp_fimul_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fimul m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fimul_r80_by_i16);
}
15617
15618
/** Opcode 0xde !11/2.
 * FICOM ST0,m16i: compare ST0 with a 16-bit integer memory operand, updating
 * only FSW (C0/C2/C3); no register is written and nothing is popped. */
FNIEMOP_DEF_1(iemOp_ficom_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ficom st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        /* Empty ST0: stack underflow; UINT8_MAX = no destination register. */
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15651
15652
/** Opcode 0xde !11/3.
 * FICOMP ST0,m16i: like FICOM m16i but pops the register stack afterwards. */
FNIEMOP_DEF_1(iemOp_ficomp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ficomp st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15685
15686
/** Opcode 0xde !11/4.  FISUB m16i: ST0 = ST0 - (int16). */
FNIEMOP_DEF_1(iemOp_fisub_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisub m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisub_r80_by_i16);
}
15693
15694
/** Opcode 0xde !11/5.  FISUBR m16i: ST0 = (int16) - ST0. */
FNIEMOP_DEF_1(iemOp_fisubr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisubr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisubr_r80_by_i16);
}
15701
15702
15703/** Opcode 0xde !11/6. */
15704FNIEMOP_DEF_1(iemOp_fidiv_m16i, uint8_t, bRm)
15705{
15706 IEMOP_MNEMONIC("fiadd m16i");
15707 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidiv_r80_by_i16);
15708}
15709
15710
15711/** Opcode 0xde !11/7. */
15712FNIEMOP_DEF_1(iemOp_fidivr_m16i, uint8_t, bRm)
15713{
15714 IEMOP_MNEMONIC("fiadd m16i");
15715 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidivr_r80_by_i16);
15716}
15717
15718
/** Opcode 0xde.
 * Escape byte 0xde dispatcher: register forms are the popping arithmetic
 * instructions (and FCOMPP at 0xd9); memory forms take a 16-bit integer
 * operand against ST0. */
FNIEMOP_DEF(iemOp_EscF6)
{
    /* Record the FPU opcode byte offset (opcode minus the escape byte). */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_faddp_stN_st0, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmulp_stN_st0, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 3: if (bRm == 0xd9)
                        return FNIEMOP_CALL(iemOp_fcompp);
                    /* Only DE D9 is defined in the /3 row. */
                    return IEMOP_RAISE_INVALID_OPCODE();
            case 4: return FNIEMOP_CALL_1(iemOp_fsubrp_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubp_stN_st0, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivrp_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivp_stN_st0, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m16i,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m16i,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m16i,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m16i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m16i,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m16i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m16i,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m16i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
15757
15758
/** Opcode 0xdf 11/0.
 * Undocument instruction, assumed to work like ffree + fincstp:  mark ST(i)
 * empty, then increment the stack top. */
FNIEMOP_DEF_1(iemOp_ffreep_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ffreep stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
15780
15781
/** Opcode 0xdf 0xe0.
 * FNSTSW AX: store the FPU status word in AX without checking for pending
 * FPU exceptions. */
FNIEMOP_DEF(iemOp_fnstsw_ax)
{
    IEMOP_MNEMONIC("fnstsw ax");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
15797
15798
15799/** Opcode 0xdf 11/5. */
15800FNIEMOP_DEF_1(iemOp_fucomip_st0_stN, uint8_t, bRm)
15801{
15802 IEMOP_MNEMONIC("fcomip st0,stN");
15803 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
15804}
15805
15806
/** Opcode 0xdf 11/6.
 * FCOMIP ST0,ST(i): ordered compare setting EFLAGS, then pop. */
FNIEMOP_DEF_1(iemOp_fcomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcomip st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
}
15813
15814
/** Opcode 0xdf !11/0.
 * FILD m16i: convert a 16-bit signed integer memory operand to 80-bit real
 * and push it. */
FNIEMOP_DEF_1(iemOp_fild_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fild m16i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int16_t, i16Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val, i16Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* The push target is ST7 relative to the current top; it must be empty. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i16_to_r80, pFpuRes, pi16Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15846
15847
/** Opcode 0xdf !11/1. */
FNIEMOP_DEF_1(iemOp_fisttp_m16i, uint8_t, bRm)
{
    /* Store ST0 to memory as a 16-bit integer using truncation (SSE3
       fisttp), then pop the FPU stack. */
    IEMOP_MNEMONIC("fisttp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination writable up front so a #PF is raised before any
       FPU state is modified. */
    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: with IM masked, store the integer indefinite
           value; otherwise leave memory untouched and raise via FSW. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15882
15883
15884/** Opcode 0xdf !11/2. */
15885FNIEMOP_DEF_1(iemOp_fist_m16i, uint8_t, bRm)
15886{
15887 IEMOP_MNEMONIC("fistp m16i");
15888 IEM_MC_BEGIN(3, 2);
15889 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
15890 IEM_MC_LOCAL(uint16_t, u16Fsw);
15891 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
15892 IEM_MC_ARG(int16_t *, pi16Dst, 1);
15893 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
15894
15895 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
15896 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15897 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15898 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15899
15900 IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
15901 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
15902 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
15903 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
15904 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
15905 IEM_MC_ELSE()
15906 IEM_MC_IF_FCW_IM()
15907 IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
15908 IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
15909 IEM_MC_ENDIF();
15910 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
15911 IEM_MC_ENDIF();
15912 IEM_MC_USED_FPU();
15913 IEM_MC_ADVANCE_RIP();
15914
15915 IEM_MC_END();
15916 return VINF_SUCCESS;
15917}
15918
15919
/** Opcode 0xdf !11/3. */
FNIEMOP_DEF_1(iemOp_fistp_m16i, uint8_t, bRm)
{
    /* Store ST0 to memory as a 16-bit integer (rounding per FCW.RC), then
       pop the FPU stack. */
    IEMOP_MNEMONIC("fistp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination writable up front so a #PF fires before FPU state
       is touched. */
    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: with IM masked, store the integer indefinite
           value; otherwise leave memory alone and report through FSW. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15954
15955
/** Opcode 0xdf !11/4.
 * fbld m80bcd - load packed BCD.  Not implemented yet; the stub raises the
 * IEM not-implemented status when hit. */
FNIEMOP_STUB_1(iemOp_fbld_m80d, uint8_t, bRm);
15958
15959
/** Opcode 0xdf !11/5. */
FNIEMOP_DEF_1(iemOp_fild_m64i, uint8_t, bRm)
{
    /* Load a 64-bit signed integer from memory, convert to 80-bit real and
       push it onto the FPU stack. */
    IEMOP_MNEMONIC("fild m64i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int64_t, i64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int64_t const *, pi64Val, i64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I64(i64Val, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* Register 7 relative to TOP is the push destination; if occupied the
       push overflows the FPU stack. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i64_to_r80, pFpuRes, pi64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15991
15992
/** Opcode 0xdf !11/6.
 * fbstp m80bcd - store packed BCD and pop.  Not implemented yet; the stub
 * raises the IEM not-implemented status when hit. */
FNIEMOP_STUB_1(iemOp_fbstp_m80d, uint8_t, bRm);
15995
15996
/** Opcode 0xdf !11/7. */
FNIEMOP_DEF_1(iemOp_fistp_m64i, uint8_t, bRm)
{
    /* Store ST0 to memory as a 64-bit integer (rounding per FCW.RC), then
       pop the FPU stack. */
    IEMOP_MNEMONIC("fistp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination writable up front so a #PF fires before FPU state
       is touched. */
    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: with IM masked, store the integer indefinite
           value; otherwise leave memory alone and report through FSW. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
16031
16032
/** Opcode 0xdf.
 * Escape group 7 dispatcher: routes on mod (register vs memory forms) and
 * the reg field of the ModR/M byte. */
FNIEMOP_DEF(iemOp_EscF7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register forms (mod == 3). */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffreep_stN, bRm); /* ffree + pop afterwards, since forever according to AMD. */
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, behaves like FXCH ST(i) on intel. */
            case 2: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
            case 4: if (bRm == 0xe0)
                        return FNIEMOP_CALL(iemOp_fnstsw_ax); /* only DF E0 is valid in /4 */
                    return IEMOP_RAISE_INVALID_OPCODE();
            case 5: return FNIEMOP_CALL_1(iemOp_fucomip_st0_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fcomip_st0_stN, bRm);
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory forms (mod != 3). */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fild_m16i,   bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m16i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fist_m16i,   bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fistp_m16i,  bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fbld_m80d,   bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fild_m64i,   bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fbstp_m80d,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fistp_m64i,  bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
16070
16071
/** Opcode 0xe0.
 * loopne/loopnz: decrement xCX (width per effective address size) and take
 * the short branch while xCX != 0 and ZF is clear. */
FNIEMOP_DEF(iemOp_loopne_Jb)
{
    IEMOP_MNEMONIC("loopne Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* The address-size prefix selects CX/ECX/RCX as the counter. */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16118
16119
/** Opcode 0xe1.
 * loope/loopz: decrement xCX (width per effective address size) and take
 * the short branch while xCX != 0 and ZF is set. */
FNIEMOP_DEF(iemOp_loope_Jb)
{
    IEMOP_MNEMONIC("loope Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* The address-size prefix selects CX/ECX/RCX as the counter. */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16166
16167
/** Opcode 0xe2.
 * loop: decrement xCX (width per effective address size) and take the short
 * branch while xCX != 0.  A "loop $" that jumps to its own first byte is
 * special-cased: instead of spinning xCX times, the counter is zeroed and
 * execution falls through in a single step. */
FNIEMOP_DEF(iemOp_loop_Jb)
{
    IEMOP_MNEMONIC("loop Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /** @todo Check out the #GP case if EIP < CS.Base or EIP > CS.Limit when
     * using the 32-bit operand size override.  How can that be restarted?  See
     * weird pseudo code in intel manual. */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            /* -offOpcode == i8Imm means the branch target is this very
               instruction (a tight self-loop); fast-forward it. */
            if (-(int8_t)pIemCpu->offOpcode != i8Imm)
            {
                IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
                IEM_MC_IF_CX_IS_NZ() {
                    IEM_MC_REL_JMP_S8(i8Imm);
                } IEM_MC_ELSE() {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ENDIF();
            }
            else
            {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP();
            }
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            if (-(int8_t)pIemCpu->offOpcode != i8Imm)
            {
                IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
                IEM_MC_IF_ECX_IS_NZ() {
                    IEM_MC_REL_JMP_S8(i8Imm);
                } IEM_MC_ELSE() {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ENDIF();
            }
            else
            {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP();
            }
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            if (-(int8_t)pIemCpu->offOpcode != i8Imm)
            {
                IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
                IEM_MC_IF_RCX_IS_NZ() {
                    IEM_MC_REL_JMP_S8(i8Imm);
                } IEM_MC_ELSE() {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ENDIF();
            }
            else
            {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP();
            }
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16241
16242
/** Opcode 0xe3.
 * jcxz/jecxz/jrcxz: branch if the address-size-selected counter register is
 * zero.  (The mnemonic string is "jecxz" for all three widths.) */
FNIEMOP_DEF(iemOp_jecxz_Jb)
{
    IEMOP_MNEMONIC("jecxz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            /* Inverted sense: fall through when CX != 0, branch when zero. */
            IEM_MC_IF_CX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_ECX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_RCX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16286
16287
16288/** Opcode 0xe4 */
16289FNIEMOP_DEF(iemOp_in_AL_Ib)
16290{
16291 IEMOP_MNEMONIC("in eAX,Ib");
16292 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
16293 IEMOP_HLP_NO_LOCK_PREFIX();
16294 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, 1);
16295}
16296
16297
/** Opcode 0xe5 */
FNIEMOP_DEF(iemOp_in_eAX_Ib)
{
    /* in eAX,Ib: word/dword input from the immediate port; the register
       width (2 or 4 bytes) follows the effective operand size. */
    IEMOP_MNEMONIC("in eAX,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, pIemCpu->enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
16306
16307
/** Opcode 0xe6 */
FNIEMOP_DEF(iemOp_out_Ib_AL)
{
    /* out Ib,AL: byte output of AL to the immediate port (cbReg = 1). */
    IEMOP_MNEMONIC("out Ib,AL");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, 1);
}
16316
16317
/** Opcode 0xe7 */
FNIEMOP_DEF(iemOp_out_Ib_eAX)
{
    /* out Ib,eAX: word/dword output to the immediate port; the register
       width (2 or 4 bytes) follows the effective operand size. */
    IEMOP_MNEMONIC("out Ib,eAX");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, pIemCpu->enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
16326
16327
/** Opcode 0xe8.
 * call rel16/rel32: near relative call, deferred to the C implementation
 * matching the effective operand size.  In 64-bit mode the immediate is a
 * sign-extended rel32. */
FNIEMOP_DEF(iemOp_call_Jv)
{
    IEMOP_MNEMONIC("call Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_16, (int16_t)u16Imm);
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_32, (int32_t)u32Imm);
        }

        case IEMMODE_64BIT:
        {
            /* rel32 sign-extended to 64 bits. */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_64, u64Imm);
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16356
16357
/** Opcode 0xe9.
 * jmp rel16/rel32: near relative jump.  64-bit mode shares the rel32 path
 * (S32 relative jump) with 32-bit mode. */
FNIEMOP_DEF(iemOp_jmp_Jv)
{
    IEMOP_MNEMONIC("jmp Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
            IEM_MC_BEGIN(0, 0);
            IEM_MC_REL_JMP_S16(i16Imm);
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        case IEMMODE_32BIT:
        {
            int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
            IEM_MC_BEGIN(0, 0);
            IEM_MC_REL_JMP_S32(i32Imm);
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16387
16388
/** Opcode 0xea.
 * jmp ptr16:16 / ptr16:32 - direct far jump.  Invalid in 64-bit mode. */
FNIEMOP_DEF(iemOp_jmp_Ap)
{
    IEMOP_MNEMONIC("jmp Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    uint32_t offSeg;
    if (pIemCpu->enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&offSeg);
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
    uint16_t uSel;  IEM_OPCODE_GET_NEXT_U16(&uSel);
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_FarJmp, uSel, offSeg, pIemCpu->enmEffOpSize);
}
16405
16406
/** Opcode 0xeb.
 * jmp rel8: short relative jump. */
FNIEMOP_DEF(iemOp_jmp_Jb)
{
    IEMOP_MNEMONIC("jmp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_REL_JMP_S8(i8Imm);
    IEM_MC_END();
    return VINF_SUCCESS;
}
16420
16421
/** Opcode 0xec */
FNIEMOP_DEF(iemOp_in_AL_DX)
{
    /* in AL,DX: byte input from the port in DX (cbReg = 1). */
    IEMOP_MNEMONIC("in AL,DX");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, 1);
}
16429
16430
/** Opcode 0xed */
FNIEMOP_DEF(iemOp_eAX_DX)
{
    /* in eAX,DX: word/dword input from the port in DX; width follows the
       effective operand size.  NOTE(review): the function name is missing
       the "in_" prefix (cf. iemOp_in_AL_DX); renaming would require touching
       the one-byte opcode table, so it is left as-is. */
    IEMOP_MNEMONIC("in eAX,DX");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, pIemCpu->enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
16438
16439
/** Opcode 0xee */
FNIEMOP_DEF(iemOp_out_DX_AL)
{
    /* out DX,AL: byte output of AL to the port in DX (cbReg = 1). */
    IEMOP_MNEMONIC("out DX,AL");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, 1);
}
16447
16448
/** Opcode 0xef */
FNIEMOP_DEF(iemOp_out_DX_eAX)
{
    /* out DX,eAX: word/dword output to the port in DX; width follows the
       effective operand size. */
    IEMOP_MNEMONIC("out DX,eAX");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, pIemCpu->enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
16456
16457
/** Opcode 0xf0.
 * LOCK prefix: records the prefix flag and recursively decodes the next
 * opcode byte through the one-byte map. */
FNIEMOP_DEF(iemOp_lock)
{
    /* A REX prefix must come immediately before the opcode, so seeing another
       prefix here invalidates any earlier REX bits. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("lock");
    pIemCpu->fPrefixes |= IEM_OP_PRF_LOCK;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
16467
16468
/** Opcode 0xf1.
 * int1/icebp: raises #DB via the common software-interrupt implementation
 * (fIsBpInstr=false, so it is not treated as INT3). */
FNIEMOP_DEF(iemOp_int_1)
{
    IEMOP_MNEMONIC("int1"); /* icebp */
    IEMOP_HLP_MIN_386(); /** @todo does not generate #UD on 286, or so they say... */
    /** @todo testcase! */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_DB, false /*fIsBpInstr*/);
}
16477
16478
/** Opcode 0xf2.
 * REPNE/REPNZ prefix: records the prefix flag and recursively decodes the
 * next opcode byte through the one-byte map. */
FNIEMOP_DEF(iemOp_repne)
{
    /* This overrides any previous REPE prefix. */
    pIemCpu->fPrefixes &= ~IEM_OP_PRF_REPZ;
    /* Any earlier REX prefix is invalidated by another prefix byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repne");
    pIemCpu->fPrefixes |= IEM_OP_PRF_REPNZ;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
16490
16491
/** Opcode 0xf3.
 * REP/REPE/REPZ prefix: records the prefix flag and recursively decodes the
 * next opcode byte through the one-byte map. */
FNIEMOP_DEF(iemOp_repe)
{
    /* This overrides any previous REPNE prefix. */
    pIemCpu->fPrefixes &= ~IEM_OP_PRF_REPNZ;
    /* Any earlier REX prefix is invalidated by another prefix byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repe");
    pIemCpu->fPrefixes |= IEM_OP_PRF_REPZ;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
16503
16504
16505/** Opcode 0xf4. */
16506FNIEMOP_DEF(iemOp_hlt)
16507{
16508 IEMOP_HLP_NO_LOCK_PREFIX();
16509 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_hlt);
16510}
16511
16512
/** Opcode 0xf5.
 * cmc: complement the carry flag. */
FNIEMOP_DEF(iemOp_cmc)
{
    IEMOP_MNEMONIC("cmc");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_FLIP_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
16524
16525
/**
 * Common implementation of 'inc/dec/not/neg Eb'.
 *
 * Handles both the register and memory forms of the byte-sized unary group
 * instructions; the memory form honours the LOCK prefix by dispatching to
 * the locked variant of the assembly worker.
 *
 * @returns Strict VBox status code.
 * @param   bRm     The RM byte.
 * @param   pImpl   The instruction implementation (normal + locked workers).
 */
FNIEMOP_DEF_2(iemOpCommonUnaryEb, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
{
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint8_t *,   pu8Dst, 0);
        IEM_MC_ARG(uint32_t *,  pEFlags, 1);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint8_t *,       pu8Dst,          0);
        IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1);
        IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        /* Read-write mapping: the unary op updates the operand in place. */
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU8, pu8Dst, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
16569
16570
/**
 * Common implementation of 'inc/dec/not/neg Ev'.
 *
 * The register form is delegated to the common GReg worker; the memory form
 * is expanded here per effective operand size, honouring the LOCK prefix.
 *
 * @returns Strict VBox status code.
 * @param   bRm     The RM byte.
 * @param   pImpl   The instruction implementation (normal + locked workers).
 */
FNIEMOP_DEF_2(iemOpCommonUnaryEv, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
{
    /* Registers are handled by a common worker. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, pImpl, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);

    /* Memory we do here. */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 2);
            IEM_MC_ARG(uint16_t *,      pu16Dst,         0);
            IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1);
            IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            /* Read-write mapping: the unary op updates the operand in place. */
            IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
            IEM_MC_FETCH_EFLAGS(EFlags);
            if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
            else
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU16, pu16Dst, pEFlags);

            IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
            IEM_MC_COMMIT_EFLAGS(EFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 2);
            IEM_MC_ARG(uint32_t *,      pu32Dst,         0);
            IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1);
            IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
            IEM_MC_FETCH_EFLAGS(EFlags);
            if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
            else
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU32, pu32Dst, pEFlags);

            IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
            IEM_MC_COMMIT_EFLAGS(EFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 2);
            IEM_MC_ARG(uint64_t *,      pu64Dst,         0);
            IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1);
            IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
            IEM_MC_FETCH_EFLAGS(EFlags);
            if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
            else
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU64, pu64Dst, pEFlags);

            IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
            IEM_MC_COMMIT_EFLAGS(EFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16649
16650
/** Opcode 0xf6 /0.
 * test Eb,Ib: AND the operands, update EFLAGS, discard the result. */
FNIEMOP_DEF_1(iemOp_grp3_test_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC("test Eb,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
        IEM_MC_ARG_CONST(uint8_t,   u8Src,/*=*/u8Imm,   1);
        IEM_MC_ARG(uint32_t *,      pEFlags,            2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,       pu8Dst,          0);
        IEM_MC_ARG(uint8_t,         u8Src,           1);
        IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

        /* cbImm=1: one immediate byte follows the ModR/M encoding, which
           matters for RIP-relative addressing. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_ASSIGN(u8Src, u8Imm);
        /* Read-only mapping - test never writes the operand back. */
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_R, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_R);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
16698
16699
/** Opcode 0xf7 /0.
 * test Ev,Iv: AND the operands, update EFLAGS, discard the result.  The
 * immediate is zw-sized (sign-extended imm32 in 64-bit mode). */
FNIEMOP_DEF_1(iemOp_grp3_test_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC("test Ev,Iv");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                0);
                IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/u16Imm,     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                0);
                IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/u32Imm,     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
                /* No clearing the high dword here - test doesn't write back the result. */
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                /* imm32 sign-extended to 64 bits. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                0);
                IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/u64Imm,     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,      pu16Dst,         0);
                IEM_MC_ARG(uint16_t,        u16Src,          1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                /* cbImm=2: two immediate bytes follow the ModR/M encoding
                   (relevant for RIP-relative addressing). */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                /* Read-only mapping - test never writes the operand back. */
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_R, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,      pu32Dst,         0);
                IEM_MC_ARG(uint32_t,        u32Src,          1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_R, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,      pu64Dst,         0);
                IEM_MC_ARG(uint64_t,        u64Src,          1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                /* cbImm=4: a sign-extended imm32 follows the ModR/M encoding. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_R, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
16836
16837
/** Opcode 0xf6 /4, /5, /6 and /7.
 *
 * Common worker for the byte-sized MUL, IMUL, DIV and IDIV encodings.  The
 * source operand comes from the ModR/M encoded register or memory location,
 * the implicit destination is AX (referenced as a 16-bit register), and the
 * supplied assembly helper performs the actual operation.
 *
 * @param   bRm     The ModR/M byte.
 * @param   pfnU8   The 8-bit operand-size assembly helper to invoke
 *                  (mul/imul/div/idiv); a non-zero return code from it
 *                  signals a divide error and raises \#DE.
 */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEb, uint8_t, bRm, PFNIEMAIMPLMULDIVU8, pfnU8)
{
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        IEMOP_HLP_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_ARG(uint16_t *, pu16AX, 0);
        IEM_MC_ARG(uint8_t, u8Value, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(int32_t, rc);

        /* Source is the r/m register (REX.B extended); AX holds the
           widened result / dividend. */
        IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        /* rc != 0 means the helper detected a divide error. */
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *, pu16AX, 0);
        IEM_MC_ARG(uint8_t, u8Value, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(int32_t, rc);

        /* Fetch the byte source operand from guest memory, then proceed as
           in the register case. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_MEM_U8(u8Value, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
16892
16893
/** Opcode 0xf7 /4, /5, /6 and /7.
 *
 * Common worker for the word/dword/qword sized MUL, IMUL, DIV and IDIV
 * encodings.  The implicit operands are {R,E,}AX and {R,E,}DX; the
 * size-specific assembly helper is selected from @a pImpl according to the
 * effective operand size, and a non-zero helper return code raises \#DE.
 *
 * @param   bRm     The ModR/M byte.
 * @param   pImpl   Table of the u16/u32/u64 assembly helpers for the
 *                  operation (mul, imul, div or idiv).
 */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEv, uint8_t, bRm, PCIEMOPMULDIVSIZES, pImpl)
{
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint16_t *, pu16AX, 0);
                IEM_MC_ARG(uint16_t *, pu16DX, 1);
                IEM_MC_ARG(uint16_t, u16Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(int32_t, rc);

                /* AX:DX is the implicit double-width operand pair. */
                IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint32_t *, pu32AX, 0);
                IEM_MC_ARG(uint32_t *, pu32DX, 1);
                IEM_MC_ARG(uint32_t, u32Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    /* 32-bit register writes zero-extend to 64 bits, so clear
                       the high halves of RAX/RDX explicitly on success. */
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint64_t *, pu64AX, 0);
                IEM_MC_ARG(uint64_t *, pu64DX, 1);
                IEM_MC_ARG(uint64_t, u64Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint16_t *, pu16AX, 0);
                IEM_MC_ARG(uint16_t *, pu16DX, 1);
                IEM_MC_ARG(uint16_t, u16Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int32_t, rc);

                /* Fetch the explicit operand from memory, then same flow as
                   the register case. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint32_t *, pu32AX, 0);
                IEM_MC_ARG(uint32_t *, pu32DX, 1);
                IEM_MC_ARG(uint32_t, u32Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    /* See register case: zero-extend RAX/RDX high halves. */
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint64_t *, pu64AX, 0);
                IEM_MC_ARG(uint64_t *, pu64DX, 1);
                IEM_MC_ARG(uint64_t, u64Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
17077
/** Opcode 0xf6.
 *
 * Group 3, byte operand size: dispatches on ModR/M.reg to TEST, NOT, NEG,
 * MUL, IMUL, DIV or IDIV; /1 raises \#UD (but see the testcase note below).
 */
FNIEMOP_DEF(iemOp_Grp3_Eb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
        case 1:
/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2:
            IEMOP_MNEMONIC("not Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_not);
        case 3:
            IEMOP_MNEMONIC("neg Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_neg);
        case 4:
            IEMOP_MNEMONIC("mul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_mul_u8);
        case 5:
            IEMOP_MNEMONIC("imul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_imul_u8);
        case 6:
            IEMOP_MNEMONIC("div Eb");
            /* div/idiv additionally leave OF and CF undefined. */
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_div_u8);
        case 7:
            IEMOP_MNEMONIC("idiv Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_idiv_u8);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
17114
17115
/** Opcode 0xf7.
 *
 * Group 3, word/dword/qword operand size: dispatches on ModR/M.reg to TEST,
 * NOT, NEG, MUL, IMUL, DIV or IDIV; /1 raises \#UD (see testcase note).
 */
FNIEMOP_DEF(iemOp_Grp3_Ev)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
        case 1:
/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2:
            IEMOP_MNEMONIC("not Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_not);
        case 3:
            IEMOP_MNEMONIC("neg Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_neg);
        case 4:
            IEMOP_MNEMONIC("mul Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_mul);
        case 5:
            IEMOP_MNEMONIC("imul Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_imul);
        case 6:
            IEMOP_MNEMONIC("div Ev");
            /* div/idiv additionally leave OF and CF undefined. */
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_div);
        case 7:
            IEMOP_MNEMONIC("idiv Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_idiv);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
17152
17153
/** Opcode 0xf8 - clc (clear carry flag). */
FNIEMOP_DEF(iemOp_clc)
{
    IEMOP_MNEMONIC("clc");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
17165
17166
/** Opcode 0xf9 - stc (set carry flag). */
FNIEMOP_DEF(iemOp_stc)
{
    IEMOP_MNEMONIC("stc");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_SET_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
17178
17179
/** Opcode 0xfa - cli; deferred to a C implementation (privilege/VME checks). */
FNIEMOP_DEF(iemOp_cli)
{
    IEMOP_MNEMONIC("cli");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cli);
}
17187
17188
/** Opcode 0xfb - sti; deferred to a C implementation (privilege/VME checks
 *  and the interrupt shadow). */
FNIEMOP_DEF(iemOp_sti)
{
    IEMOP_MNEMONIC("sti");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sti);
}
17195
17196
/** Opcode 0xfc - cld (clear direction flag). */
FNIEMOP_DEF(iemOp_cld)
{
    IEMOP_MNEMONIC("cld");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
17208
17209
/** Opcode 0xfd - std (set direction flag). */
FNIEMOP_DEF(iemOp_std)
{
    IEMOP_MNEMONIC("std");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_SET_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
17221
17222
17223/** Opcode 0xfe. */
17224FNIEMOP_DEF(iemOp_Grp4)
17225{
17226 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
17227 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
17228 {
17229 case 0:
17230 IEMOP_MNEMONIC("inc Ev");
17231 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_inc);
17232 case 1:
17233 IEMOP_MNEMONIC("dec Ev");
17234 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_dec);
17235 default:
17236 IEMOP_MNEMONIC("grp4-ud");
17237 return IEMOP_RAISE_INVALID_OPCODE();
17238 }
17239}
17240
17241
17242/**
17243 * Opcode 0xff /2.
17244 * @param bRm The RM byte.
17245 */
17246FNIEMOP_DEF_1(iemOp_Grp5_calln_Ev, uint8_t, bRm)
17247{
17248 IEMOP_MNEMONIC("calln Ev");
17249 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo Too early? */
17250 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
17251
17252 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
17253 {
17254 /* The new RIP is taken from a register. */
17255 switch (pIemCpu->enmEffOpSize)
17256 {
17257 case IEMMODE_16BIT:
17258 IEM_MC_BEGIN(1, 0);
17259 IEM_MC_ARG(uint16_t, u16Target, 0);
17260 IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
17261 IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
17262 IEM_MC_END()
17263 return VINF_SUCCESS;
17264
17265 case IEMMODE_32BIT:
17266 IEM_MC_BEGIN(1, 0);
17267 IEM_MC_ARG(uint32_t, u32Target, 0);
17268 IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
17269 IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
17270 IEM_MC_END()
17271 return VINF_SUCCESS;
17272
17273 case IEMMODE_64BIT:
17274 IEM_MC_BEGIN(1, 0);
17275 IEM_MC_ARG(uint64_t, u64Target, 0);
17276 IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
17277 IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
17278 IEM_MC_END()
17279 return VINF_SUCCESS;
17280
17281 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17282 }
17283 }
17284 else
17285 {
17286 /* The new RIP is taken from a register. */
17287 switch (pIemCpu->enmEffOpSize)
17288 {
17289 case IEMMODE_16BIT:
17290 IEM_MC_BEGIN(1, 1);
17291 IEM_MC_ARG(uint16_t, u16Target, 0);
17292 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17293 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17294 IEM_MC_FETCH_MEM_U16(u16Target, pIemCpu->iEffSeg, GCPtrEffSrc);
17295 IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
17296 IEM_MC_END()
17297 return VINF_SUCCESS;
17298
17299 case IEMMODE_32BIT:
17300 IEM_MC_BEGIN(1, 1);
17301 IEM_MC_ARG(uint32_t, u32Target, 0);
17302 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17303 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17304 IEM_MC_FETCH_MEM_U32(u32Target, pIemCpu->iEffSeg, GCPtrEffSrc);
17305 IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
17306 IEM_MC_END()
17307 return VINF_SUCCESS;
17308
17309 case IEMMODE_64BIT:
17310 IEM_MC_BEGIN(1, 1);
17311 IEM_MC_ARG(uint64_t, u64Target, 0);
17312 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17313 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17314 IEM_MC_FETCH_MEM_U64(u64Target, pIemCpu->iEffSeg, GCPtrEffSrc);
17315 IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
17316 IEM_MC_END()
17317 return VINF_SUCCESS;
17318
17319 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17320 }
17321 }
17322}
17323
/** Far-branch C implementation worker signature (selector, offset, operand
 *  size), letting iemOpHlp_Grp5_far_Ep service both callf and jmpf. */
typedef IEM_CIMPL_DECL_TYPE_3(FNIEMCIMPLFARBRANCH, uint16_t, uSel, uint64_t, offSeg, IEMMODE, enmOpSize);
17325
17326FNIEMOP_DEF_2(iemOpHlp_Grp5_far_Ep, uint8_t, bRm, FNIEMCIMPLFARBRANCH *, pfnCImpl)
17327{
17328 /* Registers? How?? */
17329 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
17330 return IEMOP_RAISE_INVALID_OPCODE(); /* callf eax is not legal */
17331
17332 /* Far pointer loaded from memory. */
17333 switch (pIemCpu->enmEffOpSize)
17334 {
17335 case IEMMODE_16BIT:
17336 IEM_MC_BEGIN(3, 1);
17337 IEM_MC_ARG(uint16_t, u16Sel, 0);
17338 IEM_MC_ARG(uint16_t, offSeg, 1);
17339 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
17340 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17341 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17342 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17343 IEM_MC_FETCH_MEM_U16(offSeg, pIemCpu->iEffSeg, GCPtrEffSrc);
17344 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc, 2);
17345 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
17346 IEM_MC_END();
17347 return VINF_SUCCESS;
17348
17349 case IEMMODE_64BIT:
17350 /** @todo testcase: AMD does not seem to believe in the case (see bs-cpu-xcpt-1)
17351 * and will apparently ignore REX.W, at least for the jmp far qword [rsp]
17352 * and call far qword [rsp] encodings. */
17353 if (!IEM_IS_GUEST_CPU_AMD(pIemCpu))
17354 {
17355 IEM_MC_BEGIN(3, 1);
17356 IEM_MC_ARG(uint16_t, u16Sel, 0);
17357 IEM_MC_ARG(uint64_t, offSeg, 1);
17358 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
17359 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17360 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17361 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17362 IEM_MC_FETCH_MEM_U64(offSeg, pIemCpu->iEffSeg, GCPtrEffSrc);
17363 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc, 8);
17364 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
17365 IEM_MC_END();
17366 return VINF_SUCCESS;
17367 }
17368 /* AMD falls thru. */
17369
17370 case IEMMODE_32BIT:
17371 IEM_MC_BEGIN(3, 1);
17372 IEM_MC_ARG(uint16_t, u16Sel, 0);
17373 IEM_MC_ARG(uint32_t, offSeg, 1);
17374 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_32BIT, 2);
17375 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17376 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17377 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17378 IEM_MC_FETCH_MEM_U32(offSeg, pIemCpu->iEffSeg, GCPtrEffSrc);
17379 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc, 4);
17380 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
17381 IEM_MC_END();
17382 return VINF_SUCCESS;
17383
17384 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17385 }
17386}
17387
17388
17389/**
17390 * Opcode 0xff /3.
17391 * @param bRm The RM byte.
17392 */
17393FNIEMOP_DEF_1(iemOp_Grp5_callf_Ep, uint8_t, bRm)
17394{
17395 IEMOP_MNEMONIC("callf Ep");
17396 return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_callf);
17397}
17398
17399
17400/**
17401 * Opcode 0xff /4.
17402 * @param bRm The RM byte.
17403 */
17404FNIEMOP_DEF_1(iemOp_Grp5_jmpn_Ev, uint8_t, bRm)
17405{
17406 IEMOP_MNEMONIC("jmpn Ev");
17407 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo Too early? */
17408 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
17409
17410 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
17411 {
17412 /* The new RIP is taken from a register. */
17413 switch (pIemCpu->enmEffOpSize)
17414 {
17415 case IEMMODE_16BIT:
17416 IEM_MC_BEGIN(0, 1);
17417 IEM_MC_LOCAL(uint16_t, u16Target);
17418 IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
17419 IEM_MC_SET_RIP_U16(u16Target);
17420 IEM_MC_END()
17421 return VINF_SUCCESS;
17422
17423 case IEMMODE_32BIT:
17424 IEM_MC_BEGIN(0, 1);
17425 IEM_MC_LOCAL(uint32_t, u32Target);
17426 IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
17427 IEM_MC_SET_RIP_U32(u32Target);
17428 IEM_MC_END()
17429 return VINF_SUCCESS;
17430
17431 case IEMMODE_64BIT:
17432 IEM_MC_BEGIN(0, 1);
17433 IEM_MC_LOCAL(uint64_t, u64Target);
17434 IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
17435 IEM_MC_SET_RIP_U64(u64Target);
17436 IEM_MC_END()
17437 return VINF_SUCCESS;
17438
17439 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17440 }
17441 }
17442 else
17443 {
17444 /* The new RIP is taken from a memory location. */
17445 switch (pIemCpu->enmEffOpSize)
17446 {
17447 case IEMMODE_16BIT:
17448 IEM_MC_BEGIN(0, 2);
17449 IEM_MC_LOCAL(uint16_t, u16Target);
17450 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17451 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17452 IEM_MC_FETCH_MEM_U16(u16Target, pIemCpu->iEffSeg, GCPtrEffSrc);
17453 IEM_MC_SET_RIP_U16(u16Target);
17454 IEM_MC_END()
17455 return VINF_SUCCESS;
17456
17457 case IEMMODE_32BIT:
17458 IEM_MC_BEGIN(0, 2);
17459 IEM_MC_LOCAL(uint32_t, u32Target);
17460 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17461 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17462 IEM_MC_FETCH_MEM_U32(u32Target, pIemCpu->iEffSeg, GCPtrEffSrc);
17463 IEM_MC_SET_RIP_U32(u32Target);
17464 IEM_MC_END()
17465 return VINF_SUCCESS;
17466
17467 case IEMMODE_64BIT:
17468 IEM_MC_BEGIN(0, 2);
17469 IEM_MC_LOCAL(uint64_t, u64Target);
17470 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17471 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17472 IEM_MC_FETCH_MEM_U64(u64Target, pIemCpu->iEffSeg, GCPtrEffSrc);
17473 IEM_MC_SET_RIP_U64(u64Target);
17474 IEM_MC_END()
17475 return VINF_SUCCESS;
17476
17477 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17478 }
17479 }
17480}
17481
17482
17483/**
17484 * Opcode 0xff /5.
17485 * @param bRm The RM byte.
17486 */
17487FNIEMOP_DEF_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm)
17488{
17489 IEMOP_MNEMONIC("jmpf Ep");
17490 return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_FarJmp);
17491}
17492
17493
17494/**
17495 * Opcode 0xff /6.
17496 * @param bRm The RM byte.
17497 */
17498FNIEMOP_DEF_1(iemOp_Grp5_push_Ev, uint8_t, bRm)
17499{
17500 IEMOP_MNEMONIC("push Ev");
17501 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo Too early? */
17502
17503 /* Registers are handled by a common worker. */
17504 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
17505 return FNIEMOP_CALL_1(iemOpCommonPushGReg, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
17506
17507 /* Memory we do here. */
17508 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
17509 switch (pIemCpu->enmEffOpSize)
17510 {
17511 case IEMMODE_16BIT:
17512 IEM_MC_BEGIN(0, 2);
17513 IEM_MC_LOCAL(uint16_t, u16Src);
17514 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17515 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17516 IEM_MC_FETCH_MEM_U16(u16Src, pIemCpu->iEffSeg, GCPtrEffSrc);
17517 IEM_MC_PUSH_U16(u16Src);
17518 IEM_MC_ADVANCE_RIP();
17519 IEM_MC_END();
17520 return VINF_SUCCESS;
17521
17522 case IEMMODE_32BIT:
17523 IEM_MC_BEGIN(0, 2);
17524 IEM_MC_LOCAL(uint32_t, u32Src);
17525 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17526 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17527 IEM_MC_FETCH_MEM_U32(u32Src, pIemCpu->iEffSeg, GCPtrEffSrc);
17528 IEM_MC_PUSH_U32(u32Src);
17529 IEM_MC_ADVANCE_RIP();
17530 IEM_MC_END();
17531 return VINF_SUCCESS;
17532
17533 case IEMMODE_64BIT:
17534 IEM_MC_BEGIN(0, 2);
17535 IEM_MC_LOCAL(uint64_t, u64Src);
17536 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17537 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17538 IEM_MC_FETCH_MEM_U64(u64Src, pIemCpu->iEffSeg, GCPtrEffSrc);
17539 IEM_MC_PUSH_U64(u64Src);
17540 IEM_MC_ADVANCE_RIP();
17541 IEM_MC_END();
17542 return VINF_SUCCESS;
17543
17544 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17545 }
17546}
17547
17548
/** Opcode 0xff.
 *
 * Group 5: dispatches on ModR/M.reg to INC, DEC, near/far CALL, near/far
 * JMP or PUSH; /7 raises \#UD.
 */
FNIEMOP_DEF(iemOp_Grp5)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            IEMOP_MNEMONIC("inc Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_inc);
        case 1:
            IEMOP_MNEMONIC("dec Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_dec);
        case 2:
            return FNIEMOP_CALL_1(iemOp_Grp5_calln_Ev, bRm);
        case 3:
            return FNIEMOP_CALL_1(iemOp_Grp5_callf_Ep, bRm);
        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp5_jmpn_Ev, bRm);
        case 5:
            return FNIEMOP_CALL_1(iemOp_Grp5_jmpf_Ep, bRm);
        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp5_push_Ev, bRm);
        case 7:
            IEMOP_MNEMONIC("grp5-ud");
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    /* All 3-bit /reg values are covered above; this is unreachable. */
    AssertFailedReturn(VERR_IEM_IPE_3);
}
17577
17578
17579
/**
 * The one-byte opcode decoder dispatch table, indexed by the opcode byte.
 * Entry 0x0f escapes to the two-byte opcode map.
 */
const PFNIEMOP g_apfnOneByteMap[256] =
{
    /* 0x00 */ iemOp_add_Eb_Gb, iemOp_add_Ev_Gv, iemOp_add_Gb_Eb, iemOp_add_Gv_Ev,
    /* 0x04 */ iemOp_add_Al_Ib, iemOp_add_eAX_Iz, iemOp_push_ES, iemOp_pop_ES,
    /* 0x08 */ iemOp_or_Eb_Gb, iemOp_or_Ev_Gv, iemOp_or_Gb_Eb, iemOp_or_Gv_Ev,
    /* 0x0c */ iemOp_or_Al_Ib, iemOp_or_eAX_Iz, iemOp_push_CS, iemOp_2byteEscape,
    /* 0x10 */ iemOp_adc_Eb_Gb, iemOp_adc_Ev_Gv, iemOp_adc_Gb_Eb, iemOp_adc_Gv_Ev,
    /* 0x14 */ iemOp_adc_Al_Ib, iemOp_adc_eAX_Iz, iemOp_push_SS, iemOp_pop_SS,
    /* 0x18 */ iemOp_sbb_Eb_Gb, iemOp_sbb_Ev_Gv, iemOp_sbb_Gb_Eb, iemOp_sbb_Gv_Ev,
    /* 0x1c */ iemOp_sbb_Al_Ib, iemOp_sbb_eAX_Iz, iemOp_push_DS, iemOp_pop_DS,
    /* 0x20 */ iemOp_and_Eb_Gb, iemOp_and_Ev_Gv, iemOp_and_Gb_Eb, iemOp_and_Gv_Ev,
    /* 0x24 */ iemOp_and_Al_Ib, iemOp_and_eAX_Iz, iemOp_seg_ES, iemOp_daa,
    /* 0x28 */ iemOp_sub_Eb_Gb, iemOp_sub_Ev_Gv, iemOp_sub_Gb_Eb, iemOp_sub_Gv_Ev,
    /* 0x2c */ iemOp_sub_Al_Ib, iemOp_sub_eAX_Iz, iemOp_seg_CS, iemOp_das,
    /* 0x30 */ iemOp_xor_Eb_Gb, iemOp_xor_Ev_Gv, iemOp_xor_Gb_Eb, iemOp_xor_Gv_Ev,
    /* 0x34 */ iemOp_xor_Al_Ib, iemOp_xor_eAX_Iz, iemOp_seg_SS, iemOp_aaa,
    /* 0x38 */ iemOp_cmp_Eb_Gb, iemOp_cmp_Ev_Gv, iemOp_cmp_Gb_Eb, iemOp_cmp_Gv_Ev,
    /* 0x3c */ iemOp_cmp_Al_Ib, iemOp_cmp_eAX_Iz, iemOp_seg_DS, iemOp_aas,
    /* 0x40 */ iemOp_inc_eAX, iemOp_inc_eCX, iemOp_inc_eDX, iemOp_inc_eBX,
    /* 0x44 */ iemOp_inc_eSP, iemOp_inc_eBP, iemOp_inc_eSI, iemOp_inc_eDI,
    /* 0x48 */ iemOp_dec_eAX, iemOp_dec_eCX, iemOp_dec_eDX, iemOp_dec_eBX,
    /* 0x4c */ iemOp_dec_eSP, iemOp_dec_eBP, iemOp_dec_eSI, iemOp_dec_eDI,
    /* 0x50 */ iemOp_push_eAX, iemOp_push_eCX, iemOp_push_eDX, iemOp_push_eBX,
    /* 0x54 */ iemOp_push_eSP, iemOp_push_eBP, iemOp_push_eSI, iemOp_push_eDI,
    /* 0x58 */ iemOp_pop_eAX, iemOp_pop_eCX, iemOp_pop_eDX, iemOp_pop_eBX,
    /* 0x5c */ iemOp_pop_eSP, iemOp_pop_eBP, iemOp_pop_eSI, iemOp_pop_eDI,
    /* 0x60 */ iemOp_pusha, iemOp_popa, iemOp_bound_Gv_Ma_evex, iemOp_arpl_Ew_Gw_movsx_Gv_Ev,
    /* 0x64 */ iemOp_seg_FS, iemOp_seg_GS, iemOp_op_size, iemOp_addr_size,
    /* 0x68 */ iemOp_push_Iz, iemOp_imul_Gv_Ev_Iz, iemOp_push_Ib, iemOp_imul_Gv_Ev_Ib,
    /* 0x6c */ iemOp_insb_Yb_DX, iemOp_inswd_Yv_DX, iemOp_outsb_Yb_DX, iemOp_outswd_Yv_DX,
    /* 0x70 */ iemOp_jo_Jb, iemOp_jno_Jb, iemOp_jc_Jb, iemOp_jnc_Jb,
    /* 0x74 */ iemOp_je_Jb, iemOp_jne_Jb, iemOp_jbe_Jb, iemOp_jnbe_Jb,
    /* 0x78 */ iemOp_js_Jb, iemOp_jns_Jb, iemOp_jp_Jb, iemOp_jnp_Jb,
    /* 0x7c */ iemOp_jl_Jb, iemOp_jnl_Jb, iemOp_jle_Jb, iemOp_jnle_Jb,
    /* 0x80 */ iemOp_Grp1_Eb_Ib_80, iemOp_Grp1_Ev_Iz, iemOp_Grp1_Eb_Ib_82, iemOp_Grp1_Ev_Ib,
    /* 0x84 */ iemOp_test_Eb_Gb, iemOp_test_Ev_Gv, iemOp_xchg_Eb_Gb, iemOp_xchg_Ev_Gv,
    /* 0x88 */ iemOp_mov_Eb_Gb, iemOp_mov_Ev_Gv, iemOp_mov_Gb_Eb, iemOp_mov_Gv_Ev,
    /* 0x8c */ iemOp_mov_Ev_Sw, iemOp_lea_Gv_M, iemOp_mov_Sw_Ev, iemOp_Grp1A,
    /* 0x90 */ iemOp_nop, iemOp_xchg_eCX_eAX, iemOp_xchg_eDX_eAX, iemOp_xchg_eBX_eAX,
    /* 0x94 */ iemOp_xchg_eSP_eAX, iemOp_xchg_eBP_eAX, iemOp_xchg_eSI_eAX, iemOp_xchg_eDI_eAX,
    /* 0x98 */ iemOp_cbw, iemOp_cwd, iemOp_call_Ap, iemOp_wait,
    /* 0x9c */ iemOp_pushf_Fv, iemOp_popf_Fv, iemOp_sahf, iemOp_lahf,
    /* 0xa0 */ iemOp_mov_Al_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL, iemOp_mov_Ov_rAX,
    /* 0xa4 */ iemOp_movsb_Xb_Yb, iemOp_movswd_Xv_Yv, iemOp_cmpsb_Xb_Yb, iemOp_cmpswd_Xv_Yv,
    /* 0xa8 */ iemOp_test_AL_Ib, iemOp_test_eAX_Iz, iemOp_stosb_Yb_AL, iemOp_stoswd_Yv_eAX,
    /* 0xac */ iemOp_lodsb_AL_Xb, iemOp_lodswd_eAX_Xv, iemOp_scasb_AL_Xb, iemOp_scaswd_eAX_Xv,
    /* 0xb0 */ iemOp_mov_AL_Ib, iemOp_CL_Ib, iemOp_DL_Ib, iemOp_BL_Ib,
    /* 0xb4 */ iemOp_mov_AH_Ib, iemOp_CH_Ib, iemOp_DH_Ib, iemOp_BH_Ib,
    /* 0xb8 */ iemOp_eAX_Iv, iemOp_eCX_Iv, iemOp_eDX_Iv, iemOp_eBX_Iv,
    /* 0xbc */ iemOp_eSP_Iv, iemOp_eBP_Iv, iemOp_eSI_Iv, iemOp_eDI_Iv,
    /* 0xc0 */ iemOp_Grp2_Eb_Ib, iemOp_Grp2_Ev_Ib, iemOp_retn_Iw, iemOp_retn,
    /* 0xc4 */ iemOp_les_Gv_Mp_vex2, iemOp_lds_Gv_Mp_vex3, iemOp_Grp11_Eb_Ib, iemOp_Grp11_Ev_Iz,
    /* 0xc8 */ iemOp_enter_Iw_Ib, iemOp_leave, iemOp_retf_Iw, iemOp_retf,
    /* 0xcc */ iemOp_int_3, iemOp_int_Ib, iemOp_into, iemOp_iret,
    /* 0xd0 */ iemOp_Grp2_Eb_1, iemOp_Grp2_Ev_1, iemOp_Grp2_Eb_CL, iemOp_Grp2_Ev_CL,
    /* 0xd4 */ iemOp_aam_Ib, iemOp_aad_Ib, iemOp_salc, iemOp_xlat,
    /* 0xd8 */ iemOp_EscF0, iemOp_EscF1, iemOp_EscF2, iemOp_EscF3,
    /* 0xdc */ iemOp_EscF4, iemOp_EscF5, iemOp_EscF6, iemOp_EscF7,
    /* 0xe0 */ iemOp_loopne_Jb, iemOp_loope_Jb, iemOp_loop_Jb, iemOp_jecxz_Jb,
    /* 0xe4 */ iemOp_in_AL_Ib, iemOp_in_eAX_Ib, iemOp_out_Ib_AL, iemOp_out_Ib_eAX,
    /* 0xe8 */ iemOp_call_Jv, iemOp_jmp_Jv, iemOp_jmp_Ap, iemOp_jmp_Jb,
    /* 0xec */ iemOp_in_AL_DX, iemOp_eAX_DX, iemOp_out_DX_AL, iemOp_out_DX_eAX,
    /* 0xf0 */ iemOp_lock, iemOp_int_1, iemOp_repne, iemOp_repe,
    /* 0xf4 */ iemOp_hlt, iemOp_cmc, iemOp_Grp3_Eb, iemOp_Grp3_Ev,
    /* 0xf8 */ iemOp_clc, iemOp_stc, iemOp_cli, iemOp_sti,
    /* 0xfc */ iemOp_cld, iemOp_std, iemOp_Grp4, iemOp_Grp5,
};
17647
17648
17649/** @} */
17650
Note: See TracBrowser for help on using the repository browser.

© 2025 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette