VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructions.cpp.h@ 61637

Last change on this file since 61637 was 61637, checked in by vboxsync, 9 years ago

IEM: Quick implementation of movups Wps,Vps.

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 607.8 KB
Line 
1/* $Id: IEMAllInstructions.cpp.h 61637 2016-06-09 18:49:56Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 */
5
6/*
7 * Copyright (C) 2011-2015 Oracle Corporation
8 *
9 * This file is part of VirtualBox Open Source Edition (OSE), as
10 * available from http://www.215389.xyz. This file is free software;
11 * you can redistribute it and/or modify it under the terms of the GNU
12 * General Public License (GPL) as published by the Free Software
13 * Foundation, in version 2 as it comes in the "COPYING" file of the
14 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
15 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
16 */
17
18
19/*******************************************************************************
20* Global Variables *
21*******************************************************************************/
22extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */
23
24
/**
 * Common worker for instructions like ADD, AND, OR, ++ with a byte
 * memory/register as the destination.
 *
 * Handles both ModR/M forms: register destination (mod == 3) and memory
 * destination, honouring the LOCK prefix for the latter.
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rm_r8, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* LOCK is invalid with a register destination. */
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0);
        IEM_MC_ARG(uint8_t,    u8Src,   1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         * Note! We're putting the eflags on the stack here so we can commit them
         *       after the memory.
         */
        /* A NULL pfnLockedU8 marks implementations that never write the
           destination (CMP, TEST), which therefore only need read access. */
        uint32_t const fAccess = pImpl->pfnLockedU8 ? IEM_ACCESS_DATA_RW : IEM_ACCESS_DATA_R; /* CMP,TEST */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,  pu8Dst,           0);
        IEM_MC_ARG(uint8_t,    u8Src,            1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_FETCH_EFLAGS(EFlags);
        /* Dispatch to the locked helper when a LOCK prefix is present. */
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
85
86
/**
 * Common worker for word/dword/qword instructions like ADD, AND, OR, ++ with
 * memory/register as the destination.
 *
 * Dispatches on the effective operand size (16/32/64-bit) and on whether the
 * ModR/M byte selects a register or a memory destination; the memory forms
 * honour the LOCK prefix.
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rm_rv, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* LOCK is invalid with a register destination. */
        IEMOP_HLP_NO_LOCK_PREFIX();

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t,   u16Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t,   u32Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                /* Clear the high dword (64-bit zero extension of a 32-bit
                   write), except for TEST which leaves its destination alone. */
                if (pImpl != &g_iemAImpl_test)
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t,   u64Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're accessing memory.
         * Note! We're putting the eflags on the stack here so we can commit them
         *       after the memory.
         */
        /* pfnLockedU8 is used as a NULL-check proxy for all operand sizes:
           CMP/TEST have no locked variants and only need read access. */
        uint32_t const fAccess = pImpl->pfnLockedU8 ? IEM_ACCESS_DATA_RW : IEM_ACCESS_DATA_R /* CMP,TEST */;
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst,          0);
                IEM_MC_ARG(uint16_t,   u16Src,           1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst,          0);
                IEM_MC_ARG(uint32_t,   u32Src,           1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst,          0);
                IEM_MC_ARG(uint64_t,   u64Src,           1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
233
234
/**
 * Common worker for byte instructions like ADD, AND, OR, ++ with a register as
 * the destination.
 *
 * The source is either a register (mod == 3) or a memory operand; since the
 * destination is always a register, no LOCK prefix is allowed and memory is
 * only read, never mapped for write.
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_r8_rm, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0);
        IEM_MC_ARG(uint8_t,    u8Src,   1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_FETCH_GREG_U8(u8Src, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_GREG_U8(pu8Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
        IEM_MC_BEGIN(3, 1);
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0);
        IEM_MC_ARG(uint8_t,    u8Src,   1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_MEM_U8(u8Src, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U8(pu8Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
286
287
/**
 * Common worker for word/dword/qword instructions like ADD, AND, OR, ++ with a
 * register as the destination.
 *
 * Dispatches on the effective operand size (16/32/64-bit); the source is a
 * register or memory operand per the ModR/M byte. The destination is always a
 * register, so no LOCK prefix is allowed.
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rv_rm, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t,   u16Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U16(u16Src, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t,   u32Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U32(u32Src, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U32(pu32Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                /* 32-bit register writes zero-extend into the high dword. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t,   u64Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U64(u64Src, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t,   u16Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U16(u16Src, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t,   u32Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U32(u32Src, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U32(pu32Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                /* 32-bit register writes zero-extend into the high dword. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t,   u64Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U64(u64Src, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
415
416
/**
 * Common worker for instructions like ADD, AND, OR, ++ with working on AL with
 * a byte immediate.
 *
 * Fetches the immediate operand byte and applies the operation to AL.
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_AL_Ib, PCIEMOPBINSIZES, pImpl)
{
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
    IEM_MC_ARG_CONST(uint8_t,   u8Src,/*=*/ u8Imm,  1);
    IEM_MC_ARG(uint32_t *,      pEFlags,            2);

    IEM_MC_REF_GREG_U8(pu8Dst, X86_GREG_xAX);
    IEM_MC_REF_EFLAGS(pEFlags);
    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
441
442
/**
 * Common worker for instructions like ADD, AND, OR, ++ with working on
 * AX/EAX/RAX with a word/dword immediate.
 *
 * In 64-bit mode the immediate is a dword sign-extended to 64 bits, per the
 * IEM_OPCODE_GET_NEXT_S32_SX_U64 fetch below.
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rAX_Iz, PCIEMOPBINSIZES, pImpl)
{
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint16_t *,      pu16Dst,                0);
            IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/ u16Imm,    1);
            IEM_MC_ARG(uint32_t *,      pEFlags,                2);

            IEM_MC_REF_GREG_U16(pu16Dst, X86_GREG_xAX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint32_t *,      pu32Dst,                0);
            IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/ u32Imm,    1);
            IEM_MC_ARG(uint32_t *,      pEFlags,                2);

            IEM_MC_REF_GREG_U32(pu32Dst, X86_GREG_xAX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

            /* Zero-extend the 32-bit write, except for TEST which leaves the
               destination untouched. */
            if (pImpl != &g_iemAImpl_test)
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            /* Dword immediate, sign-extended to 64 bits. */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint64_t *,      pu64Dst,                0);
            IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/ u64Imm,    1);
            IEM_MC_ARG(uint32_t *,      pEFlags,                2);

            IEM_MC_REF_GREG_U64(pu64Dst, X86_GREG_xAX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
515
516
/** Opcodes 0xf1, 0xd6.
 *  Decoder entry for invalid opcodes; always raises an invalid-opcode fault. */
FNIEMOP_DEF(iemOp_Invalid)
{
    IEMOP_MNEMONIC("Invalid");
    return IEMOP_RAISE_INVALID_OPCODE();
}
523
524
525
526/** @name ..... opcodes.
527 *
528 * @{
529 */
530
531/** @} */
532
533
534/** @name Two byte opcodes (first byte 0x0f).
535 *
536 * @{
537 */
538
/** Opcode 0x0f 0x00 /0.
 *  SLDT - store the LDTR selector to a register (Rv, operand-size dependent)
 *  or to a 16-bit memory location (Mw). */
FNIEMOP_DEF_1(iemOp_Grp6_sldt, uint8_t, bRm)
{
    IEMOP_MNEMONIC("sldt Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        /* Register destination: width follows the effective operand size. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Ldtr);
                IEM_MC_FETCH_LDTR_U16(u16Ldtr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Ldtr);
                IEM_MC_FETCH_LDTR_U32(u32Ldtr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Ldtr);
                IEM_MC_FETCH_LDTR_U64(u64Ldtr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory destination: always a 16-bit store. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Ldtr);
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_LDTR_U16(u16Ldtr);
        IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Ldtr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
595
596
/** Opcode 0x0f 0x00 /1.
 *  STR - store the task register selector to a register (Rv) or a 16-bit
 *  memory location (Mw); mirrors the SLDT worker above. */
FNIEMOP_DEF_1(iemOp_Grp6_str, uint8_t, bRm)
{
    IEMOP_MNEMONIC("str Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        /* Register destination: width follows the effective operand size. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Tr);
                IEM_MC_FETCH_TR_U16(u16Tr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Tr);
                IEM_MC_FETCH_TR_U32(u32Tr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tr);
                IEM_MC_FETCH_TR_U64(u64Tr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory destination: always a 16-bit store. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Tr);
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_TR_U16(u16Tr);
        IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Tr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
653
654
/** Opcode 0x0f 0x00 /2.
 *  LLDT - load the LDTR from a 16-bit register or memory operand; the real
 *  work (privilege/selector checks, descriptor load) is in iemCImpl_lldt. */
FNIEMOP_DEF_1(iemOp_Grp6_lldt, uint8_t, bRm)
{
    IEMOP_MNEMONIC("lldt Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
685
686
/** Opcode 0x0f 0x00 /3.
 *  LTR - load the task register from a 16-bit register or memory operand;
 *  selector validation and TSS loading are done by iemCImpl_ltr. */
FNIEMOP_DEF_1(iemOp_Grp6_ltr, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ltr Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
717
718
719/** Opcode 0x0f 0x00 /3. */
720FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
721{
722 IEMOP_HLP_MIN_286();
723 IEMOP_HLP_NO_REAL_OR_V86_MODE();
724
725 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
726 {
727 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
728 IEM_MC_BEGIN(2, 0);
729 IEM_MC_ARG(uint16_t, u16Sel, 0);
730 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
731 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
732 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
733 IEM_MC_END();
734 }
735 else
736 {
737 IEM_MC_BEGIN(2, 1);
738 IEM_MC_ARG(uint16_t, u16Sel, 0);
739 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
740 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
741 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
742 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
743 IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc);
744 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
745 IEM_MC_END();
746 }
747 return VINF_SUCCESS;
748}
749
750
751/** Opcode 0x0f 0x00 /4. */
752FNIEMOP_DEF_1(iemOp_Grp6_verr, uint8_t, bRm)
753{
754 IEMOP_MNEMONIC("verr Ew");
755 IEMOP_HLP_MIN_286();
756 return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
757}
758
759
760/** Opcode 0x0f 0x00 /5. */
761FNIEMOP_DEF_1(iemOp_Grp6_verw, uint8_t, bRm)
762{
763 IEMOP_MNEMONIC("verr Ew");
764 IEMOP_HLP_MIN_286();
765 return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
766}
767
768
/** Opcode 0x0f 0x00.
 *  Group 6 dispatcher - routes on the ModR/M reg field (/0../7); /6 and /7
 *  are undefined and raise an invalid-opcode fault. */
FNIEMOP_DEF(iemOp_Grp6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: return FNIEMOP_CALL_1(iemOp_Grp6_sldt, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_Grp6_str,  bRm);
        case 2: return FNIEMOP_CALL_1(iemOp_Grp6_lldt, bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_Grp6_ltr,  bRm);
        case 4: return FNIEMOP_CALL_1(iemOp_Grp6_verr, bRm);
        case 5: return FNIEMOP_CALL_1(iemOp_Grp6_verw, bRm);
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        case 7: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

}
787
788
/** Opcode 0x0f 0x01 /0.
 *  SGDT - store the GDTR to memory; the actual store is done by
 *  iemCImpl_sgdt using the effective segment and address computed here. */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC("sgdt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
805
806
/** Opcode 0x0f 0x01 /0.
 *  VMCALL - unimplemented stub; logs a complaint and raises #UD. */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
813
814
/** Opcode 0x0f 0x01 /0.
 *  VMLAUNCH - unimplemented stub; logs a complaint and raises #UD. */
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
821
822
/** Opcode 0x0f 0x01 /0.
 *  VMRESUME - unimplemented stub; logs a complaint and raises #UD. */
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
829
830
/** Opcode 0x0f 0x01 /0.
 *  VMXOFF - unimplemented stub; logs a complaint and raises #UD. */
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
837
838
/** Opcode 0x0f 0x01 /1.
 *  SIDT - store the IDTR to memory; mirrors the SGDT worker, deferring the
 *  store to iemCImpl_sidt. */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC("sidt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
855
856
/** Opcode 0x0f 0x01 /1.
 *  MONITOR - fully deferred to iemCImpl_monitor with the effective segment. */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    IEMOP_MNEMONIC("monitor");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pIemCpu->iEffSeg);
}
864
865
/** Opcode 0x0f 0x01 /1.
 *  MWAIT - fully deferred to iemCImpl_mwait. */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    IEMOP_MNEMONIC("mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
}
873
874
/** Opcode 0x0f 0x01 /2.
 *  LGDT - load the GDTR from memory; the effective operand size is forwarded
 *  so iemCImpl_lgdt can pick the base width. */
FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC("lgdt");
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pIemCpu->enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}
891
892
/** Opcode 0x0f 0x01 0xd0.
 *  XGETBV - only valid when the guest CPU exposes XSAVE/XRSTOR; otherwise
 *  raises #UD. Deferred to iemCImpl_xgetbv. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    IEMOP_MNEMONIC("xgetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xgetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}
904
905
/** Opcode 0x0f 0x01 0xd1.
 *  XSETBV - only valid when the guest CPU exposes XSAVE/XRSTOR; otherwise
 *  raises #UD. Deferred to iemCImpl_xsetbv. */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    IEMOP_MNEMONIC("xsetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xsetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}
917
918
/** Opcode 0x0f 0x01 /3.
 *  LIDT - load the IDTR from memory. In 64-bit mode the operand size is
 *  forced to 64-bit regardless of prefixes. */
FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
{
    IEMMODE enmEffOpSize = pIemCpu->enmCpuMode == IEMMODE_64BIT
                         ? IEMMODE_64BIT
                         : pIemCpu->enmEffOpSize;
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}
936
937
/* AMD-V (SVM) instruction stubs - none are implemented; each decodes to an
   undefined-opcode stub per the FNIEMOP_UD_STUB macro. */

/** Opcode 0x0f 0x01 0xd8. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmmcall);

/** Opcode 0x0f 0x01 0xda. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);

/** Opcode 0x0f 0x01 0xdb. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);

/** Opcode 0x0f 0x01 0xdc. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);

/** Opcode 0x0f 0x01 0xdd. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);

/** Opcode 0x0f 0x01 0xde. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);

/** Opcode 0x0f 0x01 0xdf. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
961
962/** Opcode 0x0f 0x01 /4. */
963FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
964{
965 IEMOP_MNEMONIC("smsw");
966 IEMOP_HLP_MIN_286();
967 IEMOP_HLP_NO_LOCK_PREFIX();
968 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
969 {
970 switch (pIemCpu->enmEffOpSize)
971 {
972 case IEMMODE_16BIT:
973 IEM_MC_BEGIN(0, 1);
974 IEM_MC_LOCAL(uint16_t, u16Tmp);
975 IEM_MC_FETCH_CR0_U16(u16Tmp);
976 if (IEM_GET_TARGET_CPU(pIemCpu) > IEMTARGETCPU_386)
977 { /* likely */ }
978 else if (IEM_GET_TARGET_CPU(pIemCpu) >= IEMTARGETCPU_386)
979 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
980 else
981 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
982 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Tmp);
983 IEM_MC_ADVANCE_RIP();
984 IEM_MC_END();
985 return VINF_SUCCESS;
986
987 case IEMMODE_32BIT:
988 IEM_MC_BEGIN(0, 1);
989 IEM_MC_LOCAL(uint32_t, u32Tmp);
990 IEM_MC_FETCH_CR0_U32(u32Tmp);
991 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Tmp);
992 IEM_MC_ADVANCE_RIP();
993 IEM_MC_END();
994 return VINF_SUCCESS;
995
996 case IEMMODE_64BIT:
997 IEM_MC_BEGIN(0, 1);
998 IEM_MC_LOCAL(uint64_t, u64Tmp);
999 IEM_MC_FETCH_CR0_U64(u64Tmp);
1000 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Tmp);
1001 IEM_MC_ADVANCE_RIP();
1002 IEM_MC_END();
1003 return VINF_SUCCESS;
1004
1005 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1006 }
1007 }
1008 else
1009 {
1010 /* Ignore operand size here, memory refs are always 16-bit. */
1011 IEM_MC_BEGIN(0, 2);
1012 IEM_MC_LOCAL(uint16_t, u16Tmp);
1013 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
1014 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
1015 IEM_MC_FETCH_CR0_U16(u16Tmp);
1016 if (IEM_GET_TARGET_CPU(pIemCpu) > IEMTARGETCPU_386)
1017 { /* likely */ }
1018 else if (pIemCpu->uTargetCpu >= IEMTARGETCPU_386)
1019 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
1020 else
1021 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
1022 IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Tmp);
1023 IEM_MC_ADVANCE_RIP();
1024 IEM_MC_END();
1025 return VINF_SUCCESS;
1026 }
1027}
1028
1029
/** Opcode 0x0f 0x01 /6. */
FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
{
    /* The operand size is effectively ignored, all is 16-bit and only the
       lower 3-bits are used. */
    IEMOP_MNEMONIC("lmsw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_LOCK_PREFIX();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register source: fetch the 16-bit value from the r/m register. */
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    else
    {
        /* Memory source: fetch the 16-bit value from the effective address. */
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    return VINF_SUCCESS; /* The heavy lifting (privilege checks etc.) is done by iemCImpl_lmsw. */
}
1058
1059
/** Opcode 0x0f 0x01 /7 (memory forms only; the register forms are dispatched
 *  to swapgs/rdtscp by iemOp_Grp7). */
FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
{
    IEMOP_MNEMONIC("invlpg");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(1, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst); /* Only the effective address is passed; no memory access here. */
    IEM_MC_END();
    return VINF_SUCCESS;
}
1073
1074
/** Opcode 0x0f 0x01 /7, register form with rm=0 (swapgs) - see iemOp_Grp7. */
FNIEMOP_DEF(iemOp_Grp7_swapgs)
{
    IEMOP_MNEMONIC("swapgs");
    IEMOP_HLP_ONLY_64BIT();
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
}
1083
1084
1085/** Opcode 0x0f 0x01 /7. */
1086FNIEMOP_DEF(iemOp_Grp7_rdtscp)
1087{
1088 NOREF(pIemCpu);
1089 IEMOP_BITCH_ABOUT_STUB();
1090 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
1091}
1092
1093
/** Opcode 0x0f 0x01.
 *
 * Group 7 dispatcher.  The instruction is selected by the ModRM reg field;
 * for reg values where the register (mod=3) encoding carries additional
 * instructions, the rm field selects among them. */
FNIEMOP_DEF(iemOp_Grp7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            /* Memory form is sgdt; register forms encode the VMX instructions. */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_sgdt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 1:
            /* Memory form is sidt; register forms encode monitor/mwait. */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_sidt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 2:
            /* Memory form is lgdt; register forms encode xgetbv/xsetbv. */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_lgdt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 3:
            /* Memory form is lidt; register forms encode the AMD SVM instructions. */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_lidt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
                case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
                case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
                case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* rm is 3 bits, so 0..7 is exhaustive. */
            }

        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);

        case 5:
            return IEMOP_RAISE_INVALID_OPCODE();

        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);

        case 7:
            /* Memory form is invlpg; register forms encode swapgs/rdtscp. */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_invlpg, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
1170
/** Common worker for lar (0x0f 0x02) and lsl (0x0f 0x03), Gv,Ew forms.
 * @param fIsLar true for LAR, false for LSL; forwarded to the C
 *        implementation which does the descriptor access and flag updates. */
FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
{
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register source. */
        IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 3);

                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_CIMPL_4(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, pEFlags, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                /* 32-bit and 64-bit share the u64 worker; it handles the width. */
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 3);

                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_CIMPL_4(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, pEFlags, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory source: the selector word is fetched from the effective address. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);

                IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_CIMPL_4(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, pEFlags, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
/** @todo testcase: make sure it's a 16-bit read. */

                IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_CIMPL_4(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, pEFlags, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
1272
1273
1274
1275/** Opcode 0x0f 0x02. */
1276FNIEMOP_DEF(iemOp_lar_Gv_Ew)
1277{
1278 IEMOP_MNEMONIC("lar Gv,Ew");
1279 return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
1280}
1281
1282
1283/** Opcode 0x0f 0x03. */
1284FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
1285{
1286 IEMOP_MNEMONIC("lsl Gv,Ew");
1287 return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
1288}
1289
1290
/** Opcode 0x0f 0x05 - the mode/CPU checks live in iemCImpl_syscall. */
FNIEMOP_DEF(iemOp_syscall)
{
    IEMOP_MNEMONIC("syscall"); /** @todo 286 LOADALL */
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
}
1298
1299
/** Opcode 0x0f 0x06 - clear CR0.TS; privilege checks done in iemCImpl_clts. */
FNIEMOP_DEF(iemOp_clts)
{
    IEMOP_MNEMONIC("clts");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
}
1307
1308
/** Opcode 0x0f 0x07 - the mode/CPU checks live in iemCImpl_sysret. */
FNIEMOP_DEF(iemOp_sysret)
{
    IEMOP_MNEMONIC("sysret"); /** @todo 386 LOADALL */
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
}
1316
1317
/** Opcode 0x0f 0x08 - invd, not implemented yet. */
FNIEMOP_STUB(iemOp_invd);
// IEMOP_HLP_MIN_486();
1322
/** Opcode 0x0f 0x09 - wbinvd.  Implemented as a privileged NOP: the CPL check
 *  is performed but no cache writeback/invalidation is emulated. */
FNIEMOP_DEF(iemOp_wbinvd)
{
    IEMOP_MNEMONIC("wbinvd");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS; /* ignore for now */
}
1335
1336
/** Opcode 0x0f 0x0b - ud2: architecturally guaranteed to raise \#UD. */
FNIEMOP_DEF(iemOp_ud2)
{
    IEMOP_MNEMONIC("ud2");
    return IEMOP_RAISE_INVALID_OPCODE();
}
1343
/** Opcode 0x0f 0x0d - AMD 3DNow! prefetch group.
 *  Raises \#UD when the CPU lacks the 3DNowPrefetch feature or for register
 *  operands; otherwise only computes the effective address (NOP). */
FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
{
    /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
    if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->f3DNowPrefetch)
    {
        IEMOP_MNEMONIC("GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register form is invalid. */
        IEMOP_MNEMONIC("GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    IEMOP_HLP_NO_LOCK_PREFIX();
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 2: /* Aliased to /0 for the time being. */
        case 4: /* Aliased to /0 for the time being. */
        case 5: /* Aliased to /0 for the time being. */
        case 6: /* Aliased to /0 for the time being. */
        case 7: /* Aliased to /0 for the time being. */
        case 0: IEMOP_MNEMONIC("prefetch"); break;
        case 1: IEMOP_MNEMONIC("prefetchw"); break;
        case 3: IEMOP_MNEMONIC("prefetchw"); break; /* NOTE(review): same mnemonic as /1 - confirm against AMD APM whether /3 is really a prefetchw alias. */
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    /* Currently a NOP. */
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
1383
1384
/** Opcode 0x0f 0x0e - femms, not implemented yet. */
FNIEMOP_STUB(iemOp_femms);


/* The 3DNow! instructions use opcode 0x0f 0x0f with a trailing immediate
   selecting the operation (dispatched by iemOp_3Dnow below).  All are
   unimplemented stubs for now. */

/** Opcode 0x0f 0x0f 0x0c. */
FNIEMOP_STUB(iemOp_3Dnow_pi2fw_Pq_Qq);

/** Opcode 0x0f 0x0f 0x0d. */
FNIEMOP_STUB(iemOp_3Dnow_pi2fd_Pq_Qq);

/** Opcode 0x0f 0x0f 0x1c. */
FNIEMOP_STUB(iemOp_3Dnow_pf2fw_Pq_Qq);

/** Opcode 0x0f 0x0f 0x1d. */
FNIEMOP_STUB(iemOp_3Dnow_pf2fd_Pq_Qq);

/** Opcode 0x0f 0x0f 0x8a. */
FNIEMOP_STUB(iemOp_3Dnow_pfnacc_Pq_Qq);

/** Opcode 0x0f 0x0f 0x8e. */
FNIEMOP_STUB(iemOp_3Dnow_pfpnacc_Pq_Qq);

/** Opcode 0x0f 0x0f 0x90. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpge_Pq_Qq);

/** Opcode 0x0f 0x0f 0x94. */
FNIEMOP_STUB(iemOp_3Dnow_pfmin_Pq_Qq);

/** Opcode 0x0f 0x0f 0x96. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcp_Pq_Qq);

/** Opcode 0x0f 0x0f 0x97. */
FNIEMOP_STUB(iemOp_3Dnow_pfrsqrt_Pq_Qq);

/** Opcode 0x0f 0x0f 0x9a. */
FNIEMOP_STUB(iemOp_3Dnow_pfsub_Pq_Qq);

/** Opcode 0x0f 0x0f 0x9e. */
FNIEMOP_STUB(iemOp_3Dnow_pfadd_PQ_Qq);

/** Opcode 0x0f 0x0f 0xa0. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpgt_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa4. */
FNIEMOP_STUB(iemOp_3Dnow_pfmax_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa6. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcpit1_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa7. */
FNIEMOP_STUB(iemOp_3Dnow_pfrsqit1_Pq_Qq);

/** Opcode 0x0f 0x0f 0xaa. */
FNIEMOP_STUB(iemOp_3Dnow_pfsubr_Pq_Qq);

/** Opcode 0x0f 0x0f 0xae. */
FNIEMOP_STUB(iemOp_3Dnow_pfacc_PQ_Qq);

/** Opcode 0x0f 0x0f 0xb0. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpeq_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb4. */
FNIEMOP_STUB(iemOp_3Dnow_pfmul_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb6. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcpit2_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb7. */
FNIEMOP_STUB(iemOp_3Dnow_pmulhrw_Pq_Qq);

/** Opcode 0x0f 0x0f 0xbb. */
FNIEMOP_STUB(iemOp_3Dnow_pswapd_Pq_Qq);

/** Opcode 0x0f 0x0f 0xbf. */
FNIEMOP_STUB(iemOp_3Dnow_pavgusb_PQ_Qq);
1460
1461
/** Opcode 0x0f 0x0f - 3DNow! escape.
 *  The actual operation is selected by an immediate byte that trails the
 *  ModRM/displacement bytes; raises \#UD when 3DNow! is not exposed. */
FNIEMOP_DEF(iemOp_3Dnow)
{
    if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->f3DNow)
    {
        IEMOP_MNEMONIC("3Dnow");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    /* This is pretty sparse, use switch instead of table. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    switch (b)
    {
        case 0x0c: return FNIEMOP_CALL(iemOp_3Dnow_pi2fw_Pq_Qq);
        case 0x0d: return FNIEMOP_CALL(iemOp_3Dnow_pi2fd_Pq_Qq);
        case 0x1c: return FNIEMOP_CALL(iemOp_3Dnow_pf2fw_Pq_Qq);
        case 0x1d: return FNIEMOP_CALL(iemOp_3Dnow_pf2fd_Pq_Qq);
        case 0x8a: return FNIEMOP_CALL(iemOp_3Dnow_pfnacc_Pq_Qq);
        case 0x8e: return FNIEMOP_CALL(iemOp_3Dnow_pfpnacc_Pq_Qq);
        case 0x90: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpge_Pq_Qq);
        case 0x94: return FNIEMOP_CALL(iemOp_3Dnow_pfmin_Pq_Qq);
        case 0x96: return FNIEMOP_CALL(iemOp_3Dnow_pfrcp_Pq_Qq);
        case 0x97: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqrt_Pq_Qq);
        case 0x9a: return FNIEMOP_CALL(iemOp_3Dnow_pfsub_Pq_Qq);
        case 0x9e: return FNIEMOP_CALL(iemOp_3Dnow_pfadd_PQ_Qq);
        case 0xa0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpgt_Pq_Qq);
        case 0xa4: return FNIEMOP_CALL(iemOp_3Dnow_pfmax_Pq_Qq);
        case 0xa6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit1_Pq_Qq);
        case 0xa7: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqit1_Pq_Qq);
        case 0xaa: return FNIEMOP_CALL(iemOp_3Dnow_pfsubr_Pq_Qq);
        case 0xae: return FNIEMOP_CALL(iemOp_3Dnow_pfacc_PQ_Qq);
        case 0xb0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpeq_Pq_Qq);
        case 0xb4: return FNIEMOP_CALL(iemOp_3Dnow_pfmul_Pq_Qq);
        case 0xb6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit2_Pq_Qq);
        case 0xb7: return FNIEMOP_CALL(iemOp_3Dnow_pmulhrw_Pq_Qq);
        case 0xbb: return FNIEMOP_CALL(iemOp_3Dnow_pswapd_Pq_Qq);
        case 0xbf: return FNIEMOP_CALL(iemOp_3Dnow_pavgusb_PQ_Qq);
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
1503
1504
/** Opcode 0x0f 0x10 - load forms, not implemented yet (the store forms at 0x0f 0x11 are). */
FNIEMOP_STUB(iemOp_movups_Vps_Wps__movupd_Vpd_Wpd__movss_Vss_Wss__movsd_Vsd_Wsd);
1507
1508
1509/** Opcode 0x0f 0x11. */
1510FNIEMOP_DEF(iemOp_movups_Wps_Vps__movupd_Wpd_Vpd__movss_Wss_Vss__movsd_Vsd_Wsd)
1511{
1512 /* Quick hack. Need to restructure all of this later some time. */
1513 if (pIemCpu->fPrefixes == 0)
1514 {
1515 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1516 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1517 {
1518 /*
1519 * Register, register.
1520 */
1521 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
1522 IEM_MC_BEGIN(0, 0);
1523 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1524 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1525 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB,
1526 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
1527 IEM_MC_ADVANCE_RIP();
1528 IEM_MC_END();
1529 }
1530 else
1531 {
1532 /*
1533 * Memory, register.
1534 */
1535 IEM_MC_BEGIN(0, 2);
1536 IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
1537 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1538
1539 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1540 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ - yes it generally is! */
1541 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1542 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1543
1544 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
1545 IEM_MC_STORE_MEM_U128(pIemCpu->iEffSeg, GCPtrEffSrc, uSrc);
1546
1547 IEM_MC_ADVANCE_RIP();
1548 IEM_MC_END();
1549 }
1550 return VINF_SUCCESS;
1551 }
1552
1553 IEMOP_BITCH_ABOUT_STUB();
1554 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
1555}
1556
1557
/* 0x0f 0x12..0x17 - packed move low/high and unpack forms; not implemented yet. */

/** Opcode 0x0f 0x12. */
FNIEMOP_STUB(iemOp_movlps_Vq_Mq__movhlps_Vq_Uq__movlpd_Vq_Mq__movsldup_Vq_Wq__movddup_Vq_Wq); //NEXT
/** Opcode 0x0f 0x13. */
FNIEMOP_STUB(iemOp_movlps_Mq_Vq__movlpd_Mq_Vq); //NEXT
/** Opcode 0x0f 0x14. */
FNIEMOP_STUB(iemOp_unpckhlps_Vps_Wq__unpcklpd_Vpd_Wq);
/** Opcode 0x0f 0x15. */
FNIEMOP_STUB(iemOp_unpckhps_Vps_Wq__unpckhpd_Vpd_Wq);
/** Opcode 0x0f 0x16. */
FNIEMOP_STUB(iemOp_movhps_Vq_Mq__movlhps_Vq_Uq__movhpd_Vq_Mq__movshdup_Vq_Wq); //NEXT
/** Opcode 0x0f 0x17. */
FNIEMOP_STUB(iemOp_movhps_Mq_Vq__movhpd_Mq_Vq); //NEXT
1570
1571
/** Opcode 0x0f 0x18 - group 16 SSE prefetch hints.
 *  Memory forms decode the hint mnemonic and execute as NOP (only the
 *  effective address is computed); register forms raise \#UD. */
FNIEMOP_DEF(iemOp_prefetch_Grp16)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX();
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 4: /* Aliased to /0 for the time being according to AMD. */
            case 5: /* Aliased to /0 for the time being according to AMD. */
            case 6: /* Aliased to /0 for the time being according to AMD. */
            case 7: /* Aliased to /0 for the time being according to AMD. */
            case 0: IEMOP_MNEMONIC("prefetchNTA m8"); break;
            case 1: IEMOP_MNEMONIC("prefetchT0  m8"); break;
            case 2: IEMOP_MNEMONIC("prefetchT1  m8"); break;
            case 3: IEMOP_MNEMONIC("prefetchT2  m8"); break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }

        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        /* Currently a NOP. */
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    return IEMOP_RAISE_INVALID_OPCODE();
}
1603
1604
/** Opcode 0x0f 0x19..0x1f - multi-byte NOP.
 *  Decodes the ModRM operand (computing any effective address) but performs
 *  no other action. */
FNIEMOP_DEF(iemOp_nop_Ev)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEM_MC_BEGIN(0, 0);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        /* Currently a NOP. */
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1627
1628
/** Opcode 0x0f 0x20 - mov Rd,Cd (read control register). */
FNIEMOP_DEF(iemOp_mov_Rd_Cd)
{
    /* mod is ignored, as is operand size overrides. */
    IEMOP_MNEMONIC("mov Rd,Cd");
    IEMOP_HLP_MIN_386();
    /* Operand size is forced: 64-bit in long mode, 32-bit otherwise. */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_64BIT;
    else
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg;
    if (pIemCpu->fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fMovCr8In32Bit)
            return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    /* Only CR0, CR2-CR4 and CR8 exist; anything else is #UD. */
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_HLP_DONE_DECODING();

    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, (X86_MODRM_RM_MASK & bRm) | pIemCpu->uRexB, iCrReg);
}
1660
1661
/** Opcode 0x0f 0x21 - mov Rd,Dd (read debug register).
 *  REX.R raises \#UD (there are no DR8+).
 *  NOTE(review): uses IEMOP_HLP_NO_LOCK_PREFIX while the 0x0f 0x23 twin uses
 *  the DONE_DECODING variant - confirm whether these should match. */
FNIEMOP_DEF(iemOp_mov_Rd_Dd)
{
    IEMOP_MNEMONIC("mov Rd,Dd");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REX_R)
        return IEMOP_RAISE_INVALID_OPCODE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
                                   (X86_MODRM_RM_MASK & bRm) | pIemCpu->uRexB,
                                   ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK));
}
1675
1676
/** Opcode 0x0f 0x22 - mov Cd,Rd (write control register). */
FNIEMOP_DEF(iemOp_mov_Cd_Rd)
{
    /* mod is ignored, as is operand size overrides. */
    IEMOP_MNEMONIC("mov Cd,Rd");
    IEMOP_HLP_MIN_386();
    /* Operand size is forced: 64-bit in long mode, 32-bit otherwise. */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_64BIT;
    else
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg;
    if (pIemCpu->fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fMovCr8In32Bit)
            return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    /* Only CR0, CR2-CR4 and CR8 exist; anything else is #UD. */
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_HLP_DONE_DECODING();

    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, (X86_MODRM_RM_MASK & bRm) | pIemCpu->uRexB);
}
1708
1709
/** Opcode 0x0f 0x23 - mov Dd,Rd (write debug register).
 *  REX.R raises \#UD (there are no DR8+). */
FNIEMOP_DEF(iemOp_mov_Dd_Rd)
{
    IEMOP_MNEMONIC("mov Dd,Rd");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REX_R)
        return IEMOP_RAISE_INVALID_OPCODE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
                                   ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK),
                                   (X86_MODRM_RM_MASK & bRm) | pIemCpu->uRexB);
}
1723
1724
/** Opcode 0x0f 0x24 - mov Rd,Td (test registers); treated as invalid here. */
FNIEMOP_DEF(iemOp_mov_Rd_Td)
{
    IEMOP_MNEMONIC("mov Rd,Td");
    /** @todo works on 386 and 486. */
    /* The RM byte is not considered, see testcase. */
    return IEMOP_RAISE_INVALID_OPCODE();
}
1733
1734
/** Opcode 0x0f 0x26 - mov Td,Rd (test registers); treated as invalid here. */
FNIEMOP_DEF(iemOp_mov_Td_Rd)
{
    IEMOP_MNEMONIC("mov Td,Rd");
    /** @todo works on 386 and 486. */
    /* The RM byte is not considered, see testcase. */
    return IEMOP_RAISE_INVALID_OPCODE();
}
1743
1744
/** Opcode 0x0f 0x28 - movaps/movapd Vps,Wps (load form).
 *  The 0x66 prefix selects movapd (SSE2), otherwise movaps (SSE).  Memory
 *  loads use the SSE-aligned fetch, so a misaligned address faults. */
FNIEMOP_DEF(iemOp_movaps_Vps_Wps__movapd_Vpd_Wpd)
{
    IEMOP_MNEMONIC(!(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP) ? "movaps r,mr" : "movapd r,mr");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        IEM_MC_BEGIN(0, 0);
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP))
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        else
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg,
                              (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ */
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP))
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        else
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1792
1793
/** Opcode 0x0f 0x29 - movaps/movapd Wps,Vps (store form).
 *  The 0x66 prefix selects movapd (SSE2), otherwise movaps (SSE).  Memory
 *  stores use the SSE-aligned store, so a misaligned address faults. */
FNIEMOP_DEF(iemOp_movaps_Wps_Vps__movapd_Wpd_Vpd)
{
    IEMOP_MNEMONIC(!(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP) ? "movaps mr,r" : "movapd mr,r");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        IEM_MC_BEGIN(0, 0);
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP))
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        else
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ */
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP))
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        else
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pIemCpu->iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1841
1842
/** Opcode 0x0f 0x2a - integer to float conversions, not implemented yet. */
FNIEMOP_STUB(iemOp_cvtpi2ps_Vps_Qpi__cvtpi2pd_Vpd_Qpi__cvtsi2ss_Vss_Ey__cvtsi2sd_Vsd_Ey); //NEXT
1845
1846
/** Opcode 0x0f 0x2b - movntps/movntpd (non-temporal store).
 *  Only the memory form is valid; the non-temporal hint is ignored and a
 *  normal aligned SSE store is performed. */
#if 1 //ndef VBOX_WITH_REM /** @todo figure out why some/all of these instructions is upsetting things */
FNIEMOP_DEF(iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd)
{
    IEMOP_MNEMONIC(!(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP) ? "movntps mr,r" : "movntpd mr,r");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ */
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP))
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        else
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pIemCpu->iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    /* The register, register encoding is invalid. */
    else
        return IEMOP_RAISE_INVALID_OPCODE();
    return VINF_SUCCESS;
}
#else
FNIEMOP_STUB(iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd);
#endif
1884
1885
/* 0x0f 0x2c..0x2f - float conversions and compares; not implemented yet. */

/** Opcode 0x0f 0x2c. */
FNIEMOP_STUB(iemOp_cvttps2pi_Ppi_Wps__cvttpd2pi_Ppi_Wpd__cvttss2si_Gy_Wss__cvttsd2si_Yu_Wsd); //NEXT
/** Opcode 0x0f 0x2d. */
FNIEMOP_STUB(iemOp_cvtps2pi_Ppi_Wps__cvtpd2pi_QpiWpd__cvtss2si_Gy_Wss__cvtsd2si_Gy_Wsd);
/** Opcode 0x0f 0x2e. */
FNIEMOP_STUB(iemOp_ucomiss_Vss_Wss__ucomisd_Vsd_Wsd); //NEXT
/** Opcode 0x0f 0x2f. */
FNIEMOP_STUB(iemOp_comiss_Vss_Wss__comisd_Vsd_Wsd);
1894
1895
/** Opcode 0x0f 0x30 - wrmsr; all work done in iemCImpl_wrmsr. */
FNIEMOP_DEF(iemOp_wrmsr)
{
    IEMOP_MNEMONIC("wrmsr");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
}
1903
1904
/** Opcode 0x0f 0x31 - rdtsc; all work done in iemCImpl_rdtsc. */
FNIEMOP_DEF(iemOp_rdtsc)
{
    IEMOP_MNEMONIC("rdtsc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
}
1912
1913
/** Opcode 0x0f 0x32 (the old comment said 0x33, but RDMSR is 0F 32 per the
 *  Intel SDM); all work done in iemCImpl_rdmsr. */
FNIEMOP_DEF(iemOp_rdmsr)
{
    IEMOP_MNEMONIC("rdmsr");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
}
1921
1922
/** Opcode 0x0f 0x33 (the old comment said 0x34, but RDPMC is 0F 33). */
FNIEMOP_STUB(iemOp_rdpmc);
/** Opcode 0x0f 0x34. */
FNIEMOP_STUB(iemOp_sysenter);
/** Opcode 0x0f 0x35. */
FNIEMOP_STUB(iemOp_sysexit);
/** Opcode 0x0f 0x37. */
FNIEMOP_STUB(iemOp_getsec);
/** Opcode 0x0f 0x38. */
FNIEMOP_UD_STUB(iemOp_3byte_Esc_A4); /* Here there be dragons... */
/** Opcode 0x0f 0x3a. */
FNIEMOP_UD_STUB(iemOp_3byte_Esc_A5); /* Here there be dragons... */
1935
1936
1937/**
1938 * Implements a conditional move.
1939 *
1940 * Wish there was an obvious way to do this where we could share and reduce
1941 * code bloat.
1942 *
1943 * @param a_Cnd The conditional "microcode" operation.
1944 */
1945#define CMOV_X(a_Cnd) \
1946 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
1947 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) \
1948 { \
1949 switch (pIemCpu->enmEffOpSize) \
1950 { \
1951 case IEMMODE_16BIT: \
1952 IEM_MC_BEGIN(0, 1); \
1953 IEM_MC_LOCAL(uint16_t, u16Tmp); \
1954 a_Cnd { \
1955 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB); \
1956 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp); \
1957 } IEM_MC_ENDIF(); \
1958 IEM_MC_ADVANCE_RIP(); \
1959 IEM_MC_END(); \
1960 return VINF_SUCCESS; \
1961 \
1962 case IEMMODE_32BIT: \
1963 IEM_MC_BEGIN(0, 1); \
1964 IEM_MC_LOCAL(uint32_t, u32Tmp); \
1965 a_Cnd { \
1966 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB); \
1967 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp); \
1968 } IEM_MC_ELSE() { \
1969 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg); \
1970 } IEM_MC_ENDIF(); \
1971 IEM_MC_ADVANCE_RIP(); \
1972 IEM_MC_END(); \
1973 return VINF_SUCCESS; \
1974 \
1975 case IEMMODE_64BIT: \
1976 IEM_MC_BEGIN(0, 1); \
1977 IEM_MC_LOCAL(uint64_t, u64Tmp); \
1978 a_Cnd { \
1979 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB); \
1980 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp); \
1981 } IEM_MC_ENDIF(); \
1982 IEM_MC_ADVANCE_RIP(); \
1983 IEM_MC_END(); \
1984 return VINF_SUCCESS; \
1985 \
1986 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
1987 } \
1988 } \
1989 else \
1990 { \
1991 switch (pIemCpu->enmEffOpSize) \
1992 { \
1993 case IEMMODE_16BIT: \
1994 IEM_MC_BEGIN(0, 2); \
1995 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
1996 IEM_MC_LOCAL(uint16_t, u16Tmp); \
1997 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
1998 IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrEffSrc); \
1999 a_Cnd { \
2000 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp); \
2001 } IEM_MC_ENDIF(); \
2002 IEM_MC_ADVANCE_RIP(); \
2003 IEM_MC_END(); \
2004 return VINF_SUCCESS; \
2005 \
2006 case IEMMODE_32BIT: \
2007 IEM_MC_BEGIN(0, 2); \
2008 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2009 IEM_MC_LOCAL(uint32_t, u32Tmp); \
2010 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2011 IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffSrc); \
2012 a_Cnd { \
2013 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp); \
2014 } IEM_MC_ELSE() { \
2015 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg); \
2016 } IEM_MC_ENDIF(); \
2017 IEM_MC_ADVANCE_RIP(); \
2018 IEM_MC_END(); \
2019 return VINF_SUCCESS; \
2020 \
2021 case IEMMODE_64BIT: \
2022 IEM_MC_BEGIN(0, 2); \
2023 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2024 IEM_MC_LOCAL(uint64_t, u64Tmp); \
2025 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2026 IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffSrc); \
2027 a_Cnd { \
2028 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp); \
2029 } IEM_MC_ENDIF(); \
2030 IEM_MC_ADVANCE_RIP(); \
2031 IEM_MC_END(); \
2032 return VINF_SUCCESS; \
2033 \
2034 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2035 } \
2036 } do {} while (0)
2037
2038
2039
/** Opcode 0x0f 0x40. */
FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovo Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF)); /* Move if overflow (OF=1). */
}
2046
2047
/** Opcode 0x0f 0x41. */
FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovno Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF)); /* Move if not overflow (OF=0). */
}
2054
2055
/** Opcode 0x0f 0x42. */
FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovc Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF)); /* Move if carry (CF=1); a.k.a. cmovb/cmovnae. */
}
2062
2063
/** Opcode 0x0f 0x43. */
FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovnc Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF)); /* Move if not carry (CF=0); a.k.a. cmovnb/cmovae. */
}
2070
2071
/** Opcode 0x0f 0x44. */
FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
{
    IEMOP_MNEMONIC("cmove Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF)); /* Move if equal (ZF=1); a.k.a. cmovz. */
}
2078
2079
/** Opcode 0x0f 0x45. */
FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovne Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)); /* Move if not equal (ZF=0); a.k.a. cmovnz. */
}
2086
2087
/** Opcode 0x0f 0x46. */
FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovbe Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF)); /* Move if below or equal (CF=1 or ZF=1). */
}
2094
2095
/** Opcode 0x0f 0x47. */
FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovnbe Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF)); /* Move if above (CF=0 and ZF=0); a.k.a. cmova. */
}
2102
2103
/** Opcode 0x0f 0x48. */
FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovs Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF)); /* Move if sign (SF=1). */
}
2110
2111
/** Opcode 0x0f 0x49. */
FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovns Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF)); /* Move if not sign (SF=0). */
}
2118
2119
/** Opcode 0x0f 0x4a. */
FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovp Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF)); /* Move if parity even (PF=1); a.k.a. cmovpe. */
}
2126
2127
/** Opcode 0x0f 0x4b. */
FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovnp Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF)); /* Move if parity odd (PF=0); a.k.a. cmovpo. */
}
2134
2135
/** Opcode 0x0f 0x4c. */
FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovl Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF)); /* Move if less (SF != OF); a.k.a. cmovnge. */
}
2142
2143
/** Opcode 0x0f 0x4d. */
FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovnl Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF)); /* Move if not less (SF == OF); a.k.a. cmovge. */
}
2150
2151
/** Opcode 0x0f 0x4e. */
FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovle Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF)); /* Move if less or equal (ZF=1 or SF != OF); a.k.a. cmovng. */
}
2158
2159
/** Opcode 0x0f 0x4f. */
FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovnle Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF)); /* Move if greater (ZF=0 and SF == OF); a.k.a. cmovg. */
}
2166
2167#undef CMOV_X
2168
/* SSE/SSE2 packed/scalar floating-point instructions 0x0f 0x50..0x5f - not implemented yet. */
/** Opcode 0x0f 0x50. */
FNIEMOP_STUB(iemOp_movmskps_Gy_Ups__movmskpd_Gy_Upd);
/** Opcode 0x0f 0x51. */
FNIEMOP_STUB(iemOp_sqrtps_Wps_Vps__sqrtpd_Wpd_Vpd__sqrtss_Vss_Wss__sqrtsd_Vsd_Wsd);
/** Opcode 0x0f 0x52. */
FNIEMOP_STUB(iemOp_rsqrtps_Wps_Vps__rsqrtss_Vss_Wss);
/** Opcode 0x0f 0x53. */
FNIEMOP_STUB(iemOp_rcpps_Wps_Vps__rcpss_Vs_Wss);
/** Opcode 0x0f 0x54. */
FNIEMOP_STUB(iemOp_andps_Vps_Wps__andpd_Wpd_Vpd);
/** Opcode 0x0f 0x55. */
FNIEMOP_STUB(iemOp_andnps_Vps_Wps__andnpd_Wpd_Vpd);
/** Opcode 0x0f 0x56. */
FNIEMOP_STUB(iemOp_orps_Wpd_Vpd__orpd_Wpd_Vpd);
/** Opcode 0x0f 0x57. */
FNIEMOP_STUB(iemOp_xorps_Vps_Wps__xorpd_Wpd_Vpd);
/** Opcode 0x0f 0x58. */
FNIEMOP_STUB(iemOp_addps_Vps_Wps__addpd_Vpd_Wpd__addss_Vss_Wss__addsd_Vsd_Wsd); //NEXT
/** Opcode 0x0f 0x59. */
FNIEMOP_STUB(iemOp_mulps_Vps_Wps__mulpd_Vpd_Wpd__mulss_Vss__Wss__mulsd_Vsd_Wsd);//NEXT
/** Opcode 0x0f 0x5a. */
FNIEMOP_STUB(iemOp_cvtps2pd_Vpd_Wps__cvtpd2ps_Vps_Wpd__cvtss2sd_Vsd_Wss__cvtsd2ss_Vss_Wsd);
/** Opcode 0x0f 0x5b. */
FNIEMOP_STUB(iemOp_cvtdq2ps_Vps_Wdq__cvtps2dq_Vdq_Wps__cvtps2dq_Vdq_Wps);
/** Opcode 0x0f 0x5c. */
FNIEMOP_STUB(iemOp_subps_Vps_Wps__subpd_Vps_Wdp__subss_Vss_Wss__subsd_Vsd_Wsd);
/** Opcode 0x0f 0x5d. */
FNIEMOP_STUB(iemOp_minps_Vps_Wps__minpd_Vpd_Wpd__minss_Vss_Wss__minsd_Vsd_Wsd);
/** Opcode 0x0f 0x5e. */
FNIEMOP_STUB(iemOp_divps_Vps_Wps__divpd_Vpd_Wpd__divss_Vss_Wss__divsd_Vsd_Wsd);
/** Opcode 0x0f 0x5f. */
FNIEMOP_STUB(iemOp_maxps_Vps_Wps__maxpd_Vpd_Wpd__maxss_Vss_Wss__maxsd_Vsd_Wsd);
2201
2202
/**
 * Common worker for SSE2 and MMX instructions on the forms:
 *      pxxxx xmm1, xmm2/mem128
 *      pxxxx mm1, mm2/mem32
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 32-bit memory access for MMX and a 64-bit memory access with
 * 128-bit alignment checking for SSE.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonMmxSse_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* The operand size prefix (0x66) selects the SSE form, no prefix the MMX form. */
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE */
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(2, 0);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_ARG(uint64_t const *, pSrc, 1);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_PREPARE_SSE_USAGE();
                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_XREG_U64_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_LOCAL(uint64_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_FETCH_MEM_U64_ALIGN_U128(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc); /* 64-bit load with 128-bit alignment check. */

                IEM_MC_PREPARE_SSE_USAGE();
                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            if (!pImpl->pfnU64) /* No MMX worker means the instruction is SSE-only (e.g. punpcklqdq). */
                return IEMOP_RAISE_INVALID_OPCODE();
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
                /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(2, 0);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_ARG(uint32_t const *, pSrc, 1);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_PREPARE_FPU_USAGE();
                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_REF_MREG_U32_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
                IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_LOCAL(uint32_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint32_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_FETCH_MEM_U32(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);

                IEM_MC_PREPARE_FPU_USAGE();
                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default: /* 0xf3 (repz) and 0xf2 (repnz) encodings are undefined here. */
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
2313
2314
/** Opcode 0x0f 0x60. */
FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd__punpcklbw_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpcklbw");
    /* Interleave low-order bytes; defers to the common low-half worker. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_LowLow_To_Full, &g_iemAImpl_punpcklbw);
}
2321
2322
/** Opcode 0x0f 0x61. */
FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd__punpcklwd_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpcklwd"); /** @todo AMD mark the MMX version as 3DNow!. Intel says MMX CPUID req. */
    /* Interleave low-order words; defers to the common low-half worker. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_LowLow_To_Full, &g_iemAImpl_punpcklwd);
}
2329
2330
/** Opcode 0x0f 0x62. */
FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd__punpckldq_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpckldq");
    /* Interleave low-order dwords; defers to the common low-half worker. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_LowLow_To_Full, &g_iemAImpl_punpckldq);
}
2337
2338
/* MMX/SSE2 pack and compare instructions 0x0f 0x63..0x67 - not implemented yet. */
/** Opcode 0x0f 0x63. */
FNIEMOP_STUB(iemOp_packsswb_Pq_Qq__packsswb_Vdq_Wdq);
/** Opcode 0x0f 0x64. */
FNIEMOP_STUB(iemOp_pcmpgtb_Pq_Qq__pcmpgtb_Vdq_Wdq);
/** Opcode 0x0f 0x65. */
FNIEMOP_STUB(iemOp_pcmpgtw_Pq_Qq__pcmpgtw_Vdq_Wdq);
/** Opcode 0x0f 0x66. */
FNIEMOP_STUB(iemOp_pcmpgtd_Pq_Qq__pcmpgtd_Vdq_Wdq);
/** Opcode 0x0f 0x67. */
FNIEMOP_STUB(iemOp_packuswb_Pq_Qq__packuswb_Vdq_Wdq);
2349
2350
/**
 * Common worker for SSE2 and MMX instructions on the forms:
 *      pxxxx xmm1, xmm2/mem128
 *      pxxxx mm1, mm2/mem64
 *
 * The 2nd operand is the second half of a register, which in the memory case
 * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
 * where it may read the full 128 bits or only the upper 64 bits.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonMmxSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* The operand size prefix (0x66) selects the SSE form, no prefix the MMX form. */
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE */
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(2, 0);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_ARG(uint128_t const *, pSrc, 1);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_PREPARE_SSE_USAGE();
                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_LOCAL(uint128_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc); /* Most CPUs probably only read the high qword. */

                IEM_MC_PREPARE_SSE_USAGE();
                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            if (!pImpl->pfnU64) /* No MMX worker means the instruction is SSE-only (e.g. punpckhqdq). */
                return IEMOP_RAISE_INVALID_OPCODE();
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
                /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(2, 0);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_ARG(uint64_t const *, pSrc, 1);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_PREPARE_FPU_USAGE();
                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
                IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_LOCAL(uint64_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_FETCH_MEM_U64(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);

                IEM_MC_PREPARE_FPU_USAGE();
                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default: /* 0xf3 (repz) and 0xf2 (repnz) encodings are undefined here. */
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
2461
2462
/** Opcode 0x0f 0x68. */
FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qq__punpckhbw_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpckhbw");
    /* Interleave high-order bytes; defers to the common high-half worker. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
}
2469
2470
/** Opcode 0x0f 0x69. */
FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qd__punpckhwd_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpckhwd");
    /* Interleave high-order words; defers to the common high-half worker. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
}
2477
2478
/** Opcode 0x0f 0x6a. */
FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qd__punpckhdq_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpckhdq");
    /* Interleave high-order dwords; defers to the common high-half worker. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
}
2485
/** Opcode 0x0f 0x6b. Not implemented yet. */
FNIEMOP_STUB(iemOp_packssdw_Pq_Qd__packssdq_Vdq_Wdq);
2488
2489
/** Opcode 0x0f 0x6c. */
FNIEMOP_DEF(iemOp_punpcklqdq_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpcklqdq");
    /* SSE2 only; the worker raises \#UD for the MMX form since there is no pfnU64. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_LowLow_To_Full, &g_iemAImpl_punpcklqdq);
}
2496
2497
/** Opcode 0x0f 0x6d. */
FNIEMOP_DEF(iemOp_punpckhqdq_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpckhqdq");
    /* SSE2 only; the worker raises \#UD for the MMX form since there is no pfnU64. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhqdq);
}
2504
2505
2506/** Opcode 0x0f 0x6e. */
2507FNIEMOP_DEF(iemOp_movd_q_Pd_Ey__movd_q_Vy_Ey)
2508{
2509 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2510 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
2511 {
2512 case IEM_OP_PRF_SIZE_OP: /* SSE */
2513 IEMOP_MNEMONIC("movd/q Wd/q,Ed/q");
2514 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2515 {
2516 /* XMM, greg*/
2517 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2518 IEM_MC_BEGIN(0, 1);
2519 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2520 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2521 if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2522 {
2523 IEM_MC_LOCAL(uint64_t, u64Tmp);
2524 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
2525 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);
2526 }
2527 else
2528 {
2529 IEM_MC_LOCAL(uint32_t, u32Tmp);
2530 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
2531 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);
2532 }
2533 IEM_MC_ADVANCE_RIP();
2534 IEM_MC_END();
2535 }
2536 else
2537 {
2538 /* XMM, [mem] */
2539 IEM_MC_BEGIN(0, 2);
2540 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2541 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); /** @todo order */
2542 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
2543 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2544 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2545 if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2546 {
2547 IEM_MC_LOCAL(uint64_t, u64Tmp);
2548 IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
2549 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);
2550 }
2551 else
2552 {
2553 IEM_MC_LOCAL(uint32_t, u32Tmp);
2554 IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
2555 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);
2556 }
2557 IEM_MC_ADVANCE_RIP();
2558 IEM_MC_END();
2559 }
2560 return VINF_SUCCESS;
2561
2562 case 0: /* MMX */
2563 IEMOP_MNEMONIC("movq/d Pd/q,Ed/q");
2564 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2565 {
2566 /* MMX, greg */
2567 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2568 IEM_MC_BEGIN(0, 1);
2569 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2570 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2571 IEM_MC_LOCAL(uint64_t, u64Tmp);
2572 if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2573 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
2574 else
2575 IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
2576 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
2577 IEM_MC_ADVANCE_RIP();
2578 IEM_MC_END();
2579 }
2580 else
2581 {
2582 /* MMX, [mem] */
2583 IEM_MC_BEGIN(0, 2);
2584 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2585 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2586 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
2587 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2588 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2589 if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2590 {
2591 IEM_MC_LOCAL(uint64_t, u64Tmp);
2592 IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
2593 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
2594 }
2595 else
2596 {
2597 IEM_MC_LOCAL(uint32_t, u32Tmp);
2598 IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
2599 IEM_MC_STORE_MREG_U32_ZX_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u32Tmp);
2600 }
2601 IEM_MC_ADVANCE_RIP();
2602 IEM_MC_END();
2603 }
2604 return VINF_SUCCESS;
2605
2606 default:
2607 return IEMOP_RAISE_INVALID_OPCODE();
2608 }
2609}
2610
2611
/** Opcode 0x0f 0x6f - movq Pq,Qq (MMX), movdqa Vdq,Wdq (0x66), movdqu Vdq,Wdq (0xf3). */
FNIEMOP_DEF(iemOp_movq_Pq_Qq__movdqa_Vdq_Wdq__movdqu_Vdq_Wdq)
{
    bool fAligned = false;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE aligned */
            fAligned = true;
            /* fall thru - both SSE forms share the code below, differing only in fAligned. */
        case IEM_OP_PRF_REPZ: /* SSE unaligned */
            if (fAligned)
                IEMOP_MNEMONIC("movdqa Vdq,Wdq");
            else
                IEMOP_MNEMONIC("movdqu Vdq,Wdq");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 0);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
                IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg,
                                      (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint128_t, u128Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
                if (fAligned) /* movdqa raises #GP on a misaligned operand; movdqu does not. */
                    IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                else
                    IEM_MC_FETCH_MEM_U128(u128Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u128Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            IEMOP_MNEMONIC("movq Pq,Qq");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
                /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
                IEM_MC_FETCH_MREG_U64(u64Tmp, bRm & X86_MODRM_RM_MASK);
                IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default: /* 0xf2 (repnz) encoding is undefined here. */
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
2708
2709
2710/** Opcode 0x0f 0x70. The immediate here is evil! */
2711FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib__pshufd_Vdq_Wdq_Ib__pshufhw_Vdq_Wdq_Ib__pshuflq_Vdq_Wdq_Ib)
2712{
2713 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2714 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
2715 {
2716 case IEM_OP_PRF_SIZE_OP: /* SSE */
2717 case IEM_OP_PRF_REPNZ: /* SSE */
2718 case IEM_OP_PRF_REPZ: /* SSE */
2719 {
2720 PFNIEMAIMPLMEDIAPSHUF pfnAImpl;
2721 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
2722 {
2723 case IEM_OP_PRF_SIZE_OP:
2724 IEMOP_MNEMONIC("pshufd Vdq,Wdq,Ib");
2725 pfnAImpl = iemAImpl_pshufd;
2726 break;
2727 case IEM_OP_PRF_REPNZ:
2728 IEMOP_MNEMONIC("pshuflw Vdq,Wdq,Ib");
2729 pfnAImpl = iemAImpl_pshuflw;
2730 break;
2731 case IEM_OP_PRF_REPZ:
2732 IEMOP_MNEMONIC("pshufhw Vdq,Wdq,Ib");
2733 pfnAImpl = iemAImpl_pshufhw;
2734 break;
2735 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2736 }
2737 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2738 {
2739 /*
2740 * Register, register.
2741 */
2742 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2743 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2744
2745 IEM_MC_BEGIN(3, 0);
2746 IEM_MC_ARG(uint128_t *, pDst, 0);
2747 IEM_MC_ARG(uint128_t const *, pSrc, 1);
2748 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2749 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2750 IEM_MC_PREPARE_SSE_USAGE();
2751 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
2752 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
2753 IEM_MC_CALL_SSE_AIMPL_3(pfnAImpl, pDst, pSrc, bEvilArg);
2754 IEM_MC_ADVANCE_RIP();
2755 IEM_MC_END();
2756 }
2757 else
2758 {
2759 /*
2760 * Register, memory.
2761 */
2762 IEM_MC_BEGIN(3, 2);
2763 IEM_MC_ARG(uint128_t *, pDst, 0);
2764 IEM_MC_LOCAL(uint128_t, uSrc);
2765 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
2766 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2767
2768 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2769 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2770 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2771 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2772 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2773
2774 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);
2775 IEM_MC_PREPARE_SSE_USAGE();
2776 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
2777 IEM_MC_CALL_SSE_AIMPL_3(pfnAImpl, pDst, pSrc, bEvilArg);
2778
2779 IEM_MC_ADVANCE_RIP();
2780 IEM_MC_END();
2781 }
2782 return VINF_SUCCESS;
2783 }
2784
2785 case 0: /* MMX Extension */
2786 IEMOP_MNEMONIC("pshufw Pq,Qq,Ib");
2787 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2788 {
2789 /*
2790 * Register, register.
2791 */
2792 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2793 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2794
2795 IEM_MC_BEGIN(3, 0);
2796 IEM_MC_ARG(uint64_t *, pDst, 0);
2797 IEM_MC_ARG(uint64_t const *, pSrc, 1);
2798 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2799 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
2800 IEM_MC_PREPARE_FPU_USAGE();
2801 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2802 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
2803 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
2804 IEM_MC_ADVANCE_RIP();
2805 IEM_MC_END();
2806 }
2807 else
2808 {
2809 /*
2810 * Register, memory.
2811 */
2812 IEM_MC_BEGIN(3, 2);
2813 IEM_MC_ARG(uint64_t *, pDst, 0);
2814 IEM_MC_LOCAL(uint64_t, uSrc);
2815 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
2816 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2817
2818 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2819 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2820 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2821 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2822 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
2823
2824 IEM_MC_FETCH_MEM_U64(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);
2825 IEM_MC_PREPARE_FPU_USAGE();
2826 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2827 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
2828
2829 IEM_MC_ADVANCE_RIP();
2830 IEM_MC_END();
2831 }
2832 return VINF_SUCCESS;
2833
2834 default:
2835 return IEMOP_RAISE_INVALID_OPCODE();
2836 }
2837}
2838
2839
/* Group 12 (0x0f 0x71) word-shift-by-immediate workers - not implemented yet. */
/** Opcode 0x0f 0x71 11/2. */
FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x71 11/2. */
FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Udq_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x71 11/4. */
FNIEMOP_STUB_1(iemOp_Grp12_psraw_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x71 11/4. */
FNIEMOP_STUB_1(iemOp_Grp12_psraw_Udq_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x71 11/6. */
FNIEMOP_STUB_1(iemOp_Grp12_psllw_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x71 11/6. */
FNIEMOP_STUB_1(iemOp_Grp12_psllw_Udq_Ib, uint8_t, bRm);
2857
2858
2859/** Opcode 0x0f 0x71. */
2860FNIEMOP_DEF(iemOp_Grp12)
2861{
2862 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2863 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2864 return IEMOP_RAISE_INVALID_OPCODE();
2865 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
2866 {
2867 case 0: case 1: case 3: case 5: case 7:
2868 return IEMOP_RAISE_INVALID_OPCODE();
2869 case 2:
2870 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2871 {
2872 case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psrlw_Nq_Ib, bRm);
2873 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psrlw_Udq_Ib, bRm);
2874 default: return IEMOP_RAISE_INVALID_OPCODE();
2875 }
2876 case 4:
2877 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2878 {
2879 case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psraw_Nq_Ib, bRm);
2880 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psraw_Udq_Ib, bRm);
2881 default: return IEMOP_RAISE_INVALID_OPCODE();
2882 }
2883 case 6:
2884 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2885 {
2886 case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psllw_Nq_Ib, bRm);
2887 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psllw_Udq_Ib, bRm);
2888 default: return IEMOP_RAISE_INVALID_OPCODE();
2889 }
2890 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2891 }
2892}
2893
2894
/* Group 13 (0x0f 0x72) dword-shift-by-immediate workers - not implemented yet. */
/** Opcode 0x0f 0x72 11/2. */
FNIEMOP_STUB_1(iemOp_Grp13_psrld_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x72 11/2. */
FNIEMOP_STUB_1(iemOp_Grp13_psrld_Udq_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x72 11/4. */
FNIEMOP_STUB_1(iemOp_Grp13_psrad_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x72 11/4. */
FNIEMOP_STUB_1(iemOp_Grp13_psrad_Udq_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x72 11/6. */
FNIEMOP_STUB_1(iemOp_Grp13_pslld_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x72 11/6. */
FNIEMOP_STUB_1(iemOp_Grp13_pslld_Udq_Ib, uint8_t, bRm);
2912
2913
2914/** Opcode 0x0f 0x72. */
2915FNIEMOP_DEF(iemOp_Grp13)
2916{
2917 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2918 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2919 return IEMOP_RAISE_INVALID_OPCODE();
2920 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
2921 {
2922 case 0: case 1: case 3: case 5: case 7:
2923 return IEMOP_RAISE_INVALID_OPCODE();
2924 case 2:
2925 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2926 {
2927 case 0: return FNIEMOP_CALL_1(iemOp_Grp13_psrld_Nq_Ib, bRm);
2928 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_psrld_Udq_Ib, bRm);
2929 default: return IEMOP_RAISE_INVALID_OPCODE();
2930 }
2931 case 4:
2932 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2933 {
2934 case 0: return FNIEMOP_CALL_1(iemOp_Grp13_psrad_Nq_Ib, bRm);
2935 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_psrad_Udq_Ib, bRm);
2936 default: return IEMOP_RAISE_INVALID_OPCODE();
2937 }
2938 case 6:
2939 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2940 {
2941 case 0: return FNIEMOP_CALL_1(iemOp_Grp13_pslld_Nq_Ib, bRm);
2942 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_pslld_Udq_Ib, bRm);
2943 default: return IEMOP_RAISE_INVALID_OPCODE();
2944 }
2945 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2946 }
2947}
2948
2949
/* Group 14 (0x0f 0x73) qword/oword-shift-by-immediate workers - not implemented yet. */
/** Opcode 0x0f 0x73 11/2. */
FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x73 11/2. */
FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Udq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x73 11/3. */
FNIEMOP_STUB_1(iemOp_Grp14_psrldq_Udq_Ib, uint8_t, bRm); //NEXT

/** Opcode 0x0f 0x73 11/6. */
FNIEMOP_STUB_1(iemOp_Grp14_psllq_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x73 11/6. */
FNIEMOP_STUB_1(iemOp_Grp14_psllq_Udq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x73 11/7. */
FNIEMOP_STUB_1(iemOp_Grp14_pslldq_Udq_Ib, uint8_t, bRm); //NEXT
2967
2968
2969/** Opcode 0x0f 0x73. */
2970FNIEMOP_DEF(iemOp_Grp14)
2971{
2972 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2973 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2974 return IEMOP_RAISE_INVALID_OPCODE();
2975 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
2976 {
2977 case 0: case 1: case 4: case 5:
2978 return IEMOP_RAISE_INVALID_OPCODE();
2979 case 2:
2980 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2981 {
2982 case 0: return FNIEMOP_CALL_1(iemOp_Grp14_psrlq_Nq_Ib, bRm);
2983 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psrlq_Udq_Ib, bRm);
2984 default: return IEMOP_RAISE_INVALID_OPCODE();
2985 }
2986 case 3:
2987 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2988 {
2989 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psrldq_Udq_Ib, bRm);
2990 default: return IEMOP_RAISE_INVALID_OPCODE();
2991 }
2992 case 6:
2993 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2994 {
2995 case 0: return FNIEMOP_CALL_1(iemOp_Grp14_psllq_Nq_Ib, bRm);
2996 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psllq_Udq_Ib, bRm);
2997 default: return IEMOP_RAISE_INVALID_OPCODE();
2998 }
2999 case 7:
3000 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3001 {
3002 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_pslldq_Udq_Ib, bRm);
3003 default: return IEMOP_RAISE_INVALID_OPCODE();
3004 }
3005 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3006 }
3007}
3008
3009
3010/**
3011 * Common worker for SSE2 and MMX instructions on the forms:
3012 * pxxx mm1, mm2/mem64
3013 * pxxx xmm1, xmm2/mem128
3014 *
3015 * Proper alignment of the 128-bit operand is enforced.
3016 * Exceptions type 4. SSE2 and MMX cpuid checks.
3017 */
3018FNIEMOP_DEF_1(iemOpCommonMmxSse2_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
3019{
3020 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3021 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
3022 {
3023 case IEM_OP_PRF_SIZE_OP: /* SSE */
3024 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3025 {
3026 /*
3027 * Register, register.
3028 */
3029 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3030 IEM_MC_BEGIN(2, 0);
3031 IEM_MC_ARG(uint128_t *, pDst, 0);
3032 IEM_MC_ARG(uint128_t const *, pSrc, 1);
3033 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3034 IEM_MC_PREPARE_SSE_USAGE();
3035 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
3036 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
3037 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3038 IEM_MC_ADVANCE_RIP();
3039 IEM_MC_END();
3040 }
3041 else
3042 {
3043 /*
3044 * Register, memory.
3045 */
3046 IEM_MC_BEGIN(2, 2);
3047 IEM_MC_ARG(uint128_t *, pDst, 0);
3048 IEM_MC_LOCAL(uint128_t, uSrc);
3049 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
3050 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3051
3052 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3053 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3054 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3055 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);
3056
3057 IEM_MC_PREPARE_SSE_USAGE();
3058 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
3059 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3060
3061 IEM_MC_ADVANCE_RIP();
3062 IEM_MC_END();
3063 }
3064 return VINF_SUCCESS;
3065
3066 case 0: /* MMX */
3067 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3068 {
3069 /*
3070 * Register, register.
3071 */
3072 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3073 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3074 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3075 IEM_MC_BEGIN(2, 0);
3076 IEM_MC_ARG(uint64_t *, pDst, 0);
3077 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3078 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3079 IEM_MC_PREPARE_FPU_USAGE();
3080 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3081 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3082 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3083 IEM_MC_ADVANCE_RIP();
3084 IEM_MC_END();
3085 }
3086 else
3087 {
3088 /*
3089 * Register, memory.
3090 */
3091 IEM_MC_BEGIN(2, 2);
3092 IEM_MC_ARG(uint64_t *, pDst, 0);
3093 IEM_MC_LOCAL(uint64_t, uSrc);
3094 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3095 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3096
3097 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3098 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3099 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3100 IEM_MC_FETCH_MEM_U64(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);
3101
3102 IEM_MC_PREPARE_FPU_USAGE();
3103 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3104 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3105
3106 IEM_MC_ADVANCE_RIP();
3107 IEM_MC_END();
3108 }
3109 return VINF_SUCCESS;
3110
3111 default:
3112 return IEMOP_RAISE_INVALID_OPCODE();
3113 }
3114}
3115
3116
/** Opcode 0x0f 0x74.
 * pcmpeqb Pq,Qq (MMX) / pcmpeqb Vdq,Wdq (SSE2) - packed byte equality
 * compare; dispatches to the common MMX/SSE2 full-to-full worker. */
FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq__pcmpeqb_Vdq_Wdq)
{
    IEMOP_MNEMONIC("pcmpeqb");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
}
3123
3124
/** Opcode 0x0f 0x75.
 * pcmpeqw Pq,Qq (MMX) / pcmpeqw Vdq,Wdq (SSE2) - packed word equality
 * compare; dispatches to the common MMX/SSE2 full-to-full worker. */
FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq__pcmpeqw_Vdq_Wdq)
{
    IEMOP_MNEMONIC("pcmpeqw");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
}
3131
3132
/** Opcode 0x0f 0x76.
 * pcmpeqd Pq,Qq (MMX) / pcmpeqd Vdq,Wdq (SSE2) - packed dword equality
 * compare; dispatches to the common MMX/SSE2 full-to-full worker.
 * NOTE(review): the identifier says "pcmped" (missing 'q') - kept as-is since
 * the opcode table elsewhere references this exact name. */
FNIEMOP_DEF(iemOp_pcmped_Pq_Qq__pcmpeqd_Vdq_Wdq)
{
    IEMOP_MNEMONIC("pcmpeqd");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
}
3139
3140
/* Unimplemented opcodes 0x0f 0x77..0x7d.  FNIEMOP_UD_STUB presumably raises
   \#UD (per its name) for the VMX-only encodings - TODO confirm against the
   stub macro definitions. */

/** Opcode 0x0f 0x77. */
FNIEMOP_STUB(iemOp_emms);
/** Opcode 0x0f 0x78. */
FNIEMOP_UD_STUB(iemOp_vmread_AmdGrp17);
/** Opcode 0x0f 0x79. */
FNIEMOP_UD_STUB(iemOp_vmwrite);
/** Opcode 0x0f 0x7c. */
FNIEMOP_STUB(iemOp_haddpd_Vdp_Wpd__haddps_Vps_Wps);
/** Opcode 0x0f 0x7d. */
FNIEMOP_STUB(iemOp_hsubpd_Vpd_Wpd__hsubps_Vps_Wps);
3151
3152
3153/** Opcode 0x0f 0x7e. */
3154FNIEMOP_DEF(iemOp_movd_q_Ey_Pd__movd_q_Ey_Vy__movq_Vq_Wq)
3155{
3156 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3157 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
3158 {
3159 case IEM_OP_PRF_SIZE_OP: /* SSE */
3160 IEMOP_MNEMONIC("movd/q Ed/q,Wd/q");
3161 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3162 {
3163 /* greg, XMM */
3164 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3165 IEM_MC_BEGIN(0, 1);
3166 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3167 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3168 if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3169 {
3170 IEM_MC_LOCAL(uint64_t, u64Tmp);
3171 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
3172 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Tmp);
3173 }
3174 else
3175 {
3176 IEM_MC_LOCAL(uint32_t, u32Tmp);
3177 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
3178 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Tmp);
3179 }
3180 IEM_MC_ADVANCE_RIP();
3181 IEM_MC_END();
3182 }
3183 else
3184 {
3185 /* [mem], XMM */
3186 IEM_MC_BEGIN(0, 2);
3187 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3188 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3189 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3190 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3191 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3192 if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3193 {
3194 IEM_MC_LOCAL(uint64_t, u64Tmp);
3195 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
3196 IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffSrc, u64Tmp);
3197 }
3198 else
3199 {
3200 IEM_MC_LOCAL(uint32_t, u32Tmp);
3201 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
3202 IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrEffSrc, u32Tmp);
3203 }
3204 IEM_MC_ADVANCE_RIP();
3205 IEM_MC_END();
3206 }
3207 return VINF_SUCCESS;
3208
3209 case 0: /* MMX */
3210 IEMOP_MNEMONIC("movq/d Ed/q,Pd/q");
3211 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3212 {
3213 /* greg, MMX */
3214 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3215 IEM_MC_BEGIN(0, 1);
3216 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3217 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
3218 if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3219 {
3220 IEM_MC_LOCAL(uint64_t, u64Tmp);
3221 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3222 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Tmp);
3223 }
3224 else
3225 {
3226 IEM_MC_LOCAL(uint32_t, u32Tmp);
3227 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3228 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Tmp);
3229 }
3230 IEM_MC_ADVANCE_RIP();
3231 IEM_MC_END();
3232 }
3233 else
3234 {
3235 /* [mem], MMX */
3236 IEM_MC_BEGIN(0, 2);
3237 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3238 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3239 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3240 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3241 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
3242 if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3243 {
3244 IEM_MC_LOCAL(uint64_t, u64Tmp);
3245 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3246 IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffSrc, u64Tmp);
3247 }
3248 else
3249 {
3250 IEM_MC_LOCAL(uint32_t, u32Tmp);
3251 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3252 IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrEffSrc, u32Tmp);
3253 }
3254 IEM_MC_ADVANCE_RIP();
3255 IEM_MC_END();
3256 }
3257 return VINF_SUCCESS;
3258
3259 default:
3260 return IEMOP_RAISE_INVALID_OPCODE();
3261 }
3262}
3263
3264
/** Opcode 0x0f 0x7f.
 * movq Qq,Pq (MMX), movdqa Wdq,Vdq (66), movdqu Wdq,Vdq (F3): store an
 * MMX/XMM register to a register or memory; the 66-prefixed form enforces
 * 16-byte alignment of a memory destination, the F3 form does not. */
FNIEMOP_DEF(iemOp_movq_Qq_Pq__movq_movdqa_Wdq_Vdq__movdqu_Wdq_Vdq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    bool fAligned = false;
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE aligned */
            fAligned = true;
            /* fall thru - the two SSE forms differ only in fAligned. */
        case IEM_OP_PRF_REPZ: /* SSE unaligned */
            if (fAligned)
                IEMOP_MNEMONIC("movdqa Wdq,Vdq");
            else
                IEMOP_MNEMONIC("movdqu Wdq,Vdq");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 0);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
                /* Destination is r/m (REX.B), source is reg (REX.R). */
                IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB,
                                      ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint128_t,  u128Tmp);
                IEM_MC_LOCAL(RTGCPTR,    GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

                IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                /* movdqa enforces 16-byte alignment; movdqu does not. */
                if (fAligned)
                    IEM_MC_STORE_MEM_U128_ALIGN_SSE(pIemCpu->iEffSeg, GCPtrEffSrc, u128Tmp);
                else
                    IEM_MC_STORE_MEM_U128(pIemCpu->iEffSeg, GCPtrEffSrc, u128Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            IEMOP_MNEMONIC("movq Qq,Pq");

            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
                /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
                IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_STORE_MREG_U64(bRm & X86_MODRM_RM_MASK, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();

                IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffSrc, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
3364
3365
3366
/** Opcode 0x0f 0x80.
 * jo Jv - near relative jump taken when OF=1. */
FNIEMOP_DEF(iemOp_jo_Jv)
{
    IEMOP_MNEMONIC("jo  Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: 16-bit relative displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit operand size (incl. the 64-bit mode default): 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3401
3402
/** Opcode 0x0f 0x81.
 * jno Jv - near relative jump taken when OF=0 (branches are swapped vs jo). */
FNIEMOP_DEF(iemOp_jno_Jv)
{
    IEMOP_MNEMONIC("jno Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        /* OF set -> fall through; OF clear -> jump. */
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3437
3438
/** Opcode 0x0f 0x82.
 * jc/jb/jnae Jv - near relative jump taken when CF=1. */
FNIEMOP_DEF(iemOp_jc_Jv)
{
    IEMOP_MNEMONIC("jc/jb/jnae Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3473
3474
/** Opcode 0x0f 0x83.
 * jnc/jnb/jae Jv - near relative jump taken when CF=0. */
FNIEMOP_DEF(iemOp_jnc_Jv)
{
    IEMOP_MNEMONIC("jnc/jnb/jae Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        /* CF set -> fall through; CF clear -> jump. */
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3509
3510
/** Opcode 0x0f 0x84.
 * je/jz Jv - near relative jump taken when ZF=1. */
FNIEMOP_DEF(iemOp_je_Jv)
{
    IEMOP_MNEMONIC("je/jz Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3545
3546
/** Opcode 0x0f 0x85.
 * jne/jnz Jv - near relative jump taken when ZF=0. */
FNIEMOP_DEF(iemOp_jne_Jv)
{
    IEMOP_MNEMONIC("jne/jnz Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        /* ZF set -> fall through; ZF clear -> jump. */
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3581
3582
/** Opcode 0x0f 0x86.
 * jbe/jna Jv - near relative jump taken when CF=1 or ZF=1. */
FNIEMOP_DEF(iemOp_jbe_Jv)
{
    IEMOP_MNEMONIC("jbe/jna Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3617
3618
/** Opcode 0x0f 0x87.
 * jnbe/ja Jv - near relative jump taken when CF=0 and ZF=0. */
FNIEMOP_DEF(iemOp_jnbe_Jv)
{
    IEMOP_MNEMONIC("jnbe/ja Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        /* CF or ZF set -> fall through; both clear -> jump. */
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3653
3654
/** Opcode 0x0f 0x88.
 * js Jv - near relative jump taken when SF=1. */
FNIEMOP_DEF(iemOp_js_Jv)
{
    IEMOP_MNEMONIC("js  Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3689
3690
/** Opcode 0x0f 0x89.
 * jns Jv - near relative jump taken when SF=0. */
FNIEMOP_DEF(iemOp_jns_Jv)
{
    IEMOP_MNEMONIC("jns Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        /* SF set -> fall through; SF clear -> jump. */
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3725
3726
/** Opcode 0x0f 0x8a.
 * jp/jpe Jv - near relative jump taken when PF=1. */
FNIEMOP_DEF(iemOp_jp_Jv)
{
    IEMOP_MNEMONIC("jp  Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3761
3762
3763/** Opcode 0x0f 0x8b. */
3764FNIEMOP_DEF(iemOp_jnp_Jv)
3765{
3766 IEMOP_MNEMONIC("jo Jv");
3767 IEMOP_HLP_MIN_386();
3768 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3769 if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
3770 {
3771 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3772 IEMOP_HLP_NO_LOCK_PREFIX();
3773
3774 IEM_MC_BEGIN(0, 0);
3775 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3776 IEM_MC_ADVANCE_RIP();
3777 } IEM_MC_ELSE() {
3778 IEM_MC_REL_JMP_S16(i16Imm);
3779 } IEM_MC_ENDIF();
3780 IEM_MC_END();
3781 }
3782 else
3783 {
3784 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3785 IEMOP_HLP_NO_LOCK_PREFIX();
3786
3787 IEM_MC_BEGIN(0, 0);
3788 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3789 IEM_MC_ADVANCE_RIP();
3790 } IEM_MC_ELSE() {
3791 IEM_MC_REL_JMP_S32(i32Imm);
3792 } IEM_MC_ENDIF();
3793 IEM_MC_END();
3794 }
3795 return VINF_SUCCESS;
3796}
3797
3798
/** Opcode 0x0f 0x8c.
 * jl/jnge Jv - near relative jump taken when SF != OF (signed less). */
FNIEMOP_DEF(iemOp_jl_Jv)
{
    IEMOP_MNEMONIC("jl/jnge Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3833
3834
/** Opcode 0x0f 0x8d.
 * jnl/jge Jv - near relative jump taken when SF == OF (signed greater-or-equal). */
FNIEMOP_DEF(iemOp_jnl_Jv)
{
    IEMOP_MNEMONIC("jnl/jge Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        /* SF != OF -> fall through; SF == OF -> jump. */
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3869
3870
/** Opcode 0x0f 0x8e.
 * jle/jng Jv - near relative jump taken when ZF=1 or SF != OF (signed less-or-equal). */
FNIEMOP_DEF(iemOp_jle_Jv)
{
    IEMOP_MNEMONIC("jle/jng Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3905
3906
/** Opcode 0x0f 0x8f.
 * jnle/jg Jv - near relative jump taken when ZF=0 and SF == OF (signed greater). */
FNIEMOP_DEF(iemOp_jnle_Jv)
{
    IEMOP_MNEMONIC("jnle/jg Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        /* ZF set or SF != OF -> fall through; otherwise jump. */
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3941
3942
/** Opcode 0x0f 0x90.
 * seto Eb - set byte to 1 when OF=1, else 0. */
FNIEMOP_DEF(iemOp_seto_Eb)
{
    IEMOP_MNEMONIC("seto Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3982
3983
/** Opcode 0x0f 0x91.
 * setno Eb - set byte to 1 when OF=0, else 0 (store values inverted vs seto). */
FNIEMOP_DEF(iemOp_setno_Eb)
{
    IEMOP_MNEMONIC("setno Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4023
4024
/** Opcode 0x0f 0x92.
 * setc/setb/setnae Eb - set byte to 1 when CF=1, else 0. */
FNIEMOP_DEF(iemOp_setc_Eb)
{
    IEMOP_MNEMONIC("setc Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4064
4065
/** Opcode 0x0f 0x93.
 * setnc/setnb/setae Eb - set byte to 1 when CF=0, else 0. */
FNIEMOP_DEF(iemOp_setnc_Eb)
{
    IEMOP_MNEMONIC("setnc Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4105
4106
/** Opcode 0x0f 0x94.
 * sete/setz Eb - set byte to 1 when ZF=1, else 0. */
FNIEMOP_DEF(iemOp_sete_Eb)
{
    IEMOP_MNEMONIC("sete Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4146
4147
/** Opcode 0x0f 0x95 - setne/setnz Eb: set byte to 1 if ZF=0, else 0. */
FNIEMOP_DEF(iemOp_setne_Eb)
{
    IEMOP_MNEMONIC("setne Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way.  AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        /* Note: condition tested inverted - the taken (ZF=1) branch stores 0. */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4187
4188
/** Opcode 0x0f 0x96 - setbe/setna Eb: set byte to 1 if CF=1 or ZF=1, else 0. */
FNIEMOP_DEF(iemOp_setbe_Eb)
{
    IEMOP_MNEMONIC("setbe Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way.  AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4228
4229
/** Opcode 0x0f 0x97 - setnbe/seta Eb: set byte to 1 if CF=0 and ZF=0, else 0. */
FNIEMOP_DEF(iemOp_setnbe_Eb)
{
    IEMOP_MNEMONIC("setnbe Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way.  AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        /* Note: condition tested inverted - the taken (CF|ZF set) branch stores 0. */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4269
4270
/** Opcode 0x0f 0x98 - sets Eb: set byte to 1 if SF=1, else 0. */
FNIEMOP_DEF(iemOp_sets_Eb)
{
    IEMOP_MNEMONIC("sets Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way.  AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4310
4311
/** Opcode 0x0f 0x99 - setns Eb: set byte to 1 if SF=0, else 0. */
FNIEMOP_DEF(iemOp_setns_Eb)
{
    IEMOP_MNEMONIC("setns Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way.  AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        /* Note: condition tested inverted - the taken (SF=1) branch stores 0. */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4351
4352
4353/** Opcode 0x0f 0x9a. */
4354FNIEMOP_DEF(iemOp_setp_Eb)
4355{
4356 IEMOP_MNEMONIC("setnp Eb");
4357 IEMOP_HLP_MIN_386();
4358 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4359 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */
4360
4361 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4362 * any way. AMD says it's "unused", whatever that means. We're
4363 * ignoring for now. */
4364 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4365 {
4366 /* register target */
4367 IEM_MC_BEGIN(0, 0);
4368 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4369 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
4370 } IEM_MC_ELSE() {
4371 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
4372 } IEM_MC_ENDIF();
4373 IEM_MC_ADVANCE_RIP();
4374 IEM_MC_END();
4375 }
4376 else
4377 {
4378 /* memory target */
4379 IEM_MC_BEGIN(0, 1);
4380 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4381 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4382 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4383 IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
4384 } IEM_MC_ELSE() {
4385 IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
4386 } IEM_MC_ENDIF();
4387 IEM_MC_ADVANCE_RIP();
4388 IEM_MC_END();
4389 }
4390 return VINF_SUCCESS;
4391}
4392
4393
/** Opcode 0x0f 0x9b - setnp/setpo Eb: set byte to 1 if PF=0, else 0. */
FNIEMOP_DEF(iemOp_setnp_Eb)
{
    IEMOP_MNEMONIC("setnp Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way.  AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        /* Note: condition tested inverted - the taken (PF=1) branch stores 0. */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4433
4434
/** Opcode 0x0f 0x9c - setl/setnge Eb: set byte to 1 if SF != OF, else 0. */
FNIEMOP_DEF(iemOp_setl_Eb)
{
    IEMOP_MNEMONIC("setl Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way.  AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4474
4475
/** Opcode 0x0f 0x9d - setnl/setge Eb: set byte to 1 if SF == OF, else 0. */
FNIEMOP_DEF(iemOp_setnl_Eb)
{
    IEMOP_MNEMONIC("setnl Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way.  AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        /* Note: condition tested inverted - the taken (SF != OF) branch stores 0. */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4515
4516
/** Opcode 0x0f 0x9e - setle/setng Eb: set byte to 1 if ZF=1 or SF != OF, else 0. */
FNIEMOP_DEF(iemOp_setle_Eb)
{
    IEMOP_MNEMONIC("setle Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way.  AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4556
4557
/** Opcode 0x0f 0x9f - setnle/setg Eb: set byte to 1 if ZF=0 and SF == OF, else 0. */
FNIEMOP_DEF(iemOp_setnle_Eb)
{
    IEMOP_MNEMONIC("setnle Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way.  AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        /* Note: condition tested inverted - the taken (ZF=1 or SF!=OF) branch stores 0. */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4597
4598
/**
 * Common 'push segment-register' helper.
 *
 * @param   iReg    The segment register index to push (X86_SREG_XXX).
 *                  ES/CS/SS/DS (indices below X86_SREG_FS) are rejected in
 *                  64-bit mode; FS/GS are allowed in all modes.
 */
FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (iReg < X86_SREG_FS)
        IEMOP_HLP_NO_64BIT();   /* pushing ES/CS/SS/DS is invalid in 64-bit mode */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* The segment value is zero-extended to the effective operand size and
       pushed; the 32-bit case uses a dedicated SREG push micro-op
       (NOTE(review): presumably to model real-CPU partial-write behavior of
       32-bit segment pushes - confirm against IEM_MC_PUSH_U32_SREG). */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Value);
            IEM_MC_FETCH_SREG_U16(u16Value, iReg);
            IEM_MC_PUSH_U16(u16Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
            IEM_MC_PUSH_U32_SREG(u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
            IEM_MC_PUSH_U64(u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
4641
4642
/** Opcode 0x0f 0xa0 - push fs. */
FNIEMOP_DEF(iemOp_push_fs)
{
    IEMOP_MNEMONIC("push fs");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* Defer to the shared segment-register push worker. */
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
}
4651
4652
/** Opcode 0x0f 0xa1 - pop fs. */
FNIEMOP_DEF(iemOp_pop_fs)
{
    IEMOP_MNEMONIC("pop fs");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* Segment loads go through the C implementation (descriptor checks etc.). */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pIemCpu->enmEffOpSize);
}
4661
4662
/** Opcode 0x0f 0xa2 - cpuid. */
FNIEMOP_DEF(iemOp_cpuid)
{
    IEMOP_MNEMONIC("cpuid");
    IEMOP_HLP_MIN_486(); /* not all 486es. */
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* Entirely handled by the C implementation. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
}
4671
4672
4673/**
4674 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
4675 * iemOp_bts_Ev_Gv.
4676 */
4677FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
4678{
4679 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4680 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
4681
4682 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4683 {
4684 /* register destination. */
4685 IEMOP_HLP_NO_LOCK_PREFIX();
4686 switch (pIemCpu->enmEffOpSize)
4687 {
4688 case IEMMODE_16BIT:
4689 IEM_MC_BEGIN(3, 0);
4690 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
4691 IEM_MC_ARG(uint16_t, u16Src, 1);
4692 IEM_MC_ARG(uint32_t *, pEFlags, 2);
4693
4694 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4695 IEM_MC_AND_LOCAL_U16(u16Src, 0xf);
4696 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
4697 IEM_MC_REF_EFLAGS(pEFlags);
4698 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
4699
4700 IEM_MC_ADVANCE_RIP();
4701 IEM_MC_END();
4702 return VINF_SUCCESS;
4703
4704 case IEMMODE_32BIT:
4705 IEM_MC_BEGIN(3, 0);
4706 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
4707 IEM_MC_ARG(uint32_t, u32Src, 1);
4708 IEM_MC_ARG(uint32_t *, pEFlags, 2);
4709
4710 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4711 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
4712 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
4713 IEM_MC_REF_EFLAGS(pEFlags);
4714 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
4715
4716 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
4717 IEM_MC_ADVANCE_RIP();
4718 IEM_MC_END();
4719 return VINF_SUCCESS;
4720
4721 case IEMMODE_64BIT:
4722 IEM_MC_BEGIN(3, 0);
4723 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
4724 IEM_MC_ARG(uint64_t, u64Src, 1);
4725 IEM_MC_ARG(uint32_t *, pEFlags, 2);
4726
4727 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4728 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
4729 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
4730 IEM_MC_REF_EFLAGS(pEFlags);
4731 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
4732
4733 IEM_MC_ADVANCE_RIP();
4734 IEM_MC_END();
4735 return VINF_SUCCESS;
4736
4737 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4738 }
4739 }
4740 else
4741 {
4742 /* memory destination. */
4743
4744 uint32_t fAccess;
4745 if (pImpl->pfnLockedU16)
4746 fAccess = IEM_ACCESS_DATA_RW;
4747 else /* BT */
4748 {
4749 IEMOP_HLP_NO_LOCK_PREFIX();
4750 fAccess = IEM_ACCESS_DATA_R;
4751 }
4752
4753 NOREF(fAccess);
4754
4755 /** @todo test negative bit offsets! */
4756 switch (pIemCpu->enmEffOpSize)
4757 {
4758 case IEMMODE_16BIT:
4759 IEM_MC_BEGIN(3, 2);
4760 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
4761 IEM_MC_ARG(uint16_t, u16Src, 1);
4762 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
4763 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4764 IEM_MC_LOCAL(int16_t, i16AddrAdj);
4765
4766 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4767 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4768 IEM_MC_ASSIGN(i16AddrAdj, u16Src);
4769 IEM_MC_AND_ARG_U16(u16Src, 0x0f);
4770 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
4771 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 1);
4772 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
4773 IEM_MC_FETCH_EFLAGS(EFlags);
4774
4775 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
4776 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
4777 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
4778 else
4779 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
4780 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
4781
4782 IEM_MC_COMMIT_EFLAGS(EFlags);
4783 IEM_MC_ADVANCE_RIP();
4784 IEM_MC_END();
4785 return VINF_SUCCESS;
4786
4787 case IEMMODE_32BIT:
4788 IEM_MC_BEGIN(3, 2);
4789 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
4790 IEM_MC_ARG(uint32_t, u32Src, 1);
4791 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
4792 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4793 IEM_MC_LOCAL(int32_t, i32AddrAdj);
4794
4795 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4796 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4797 IEM_MC_ASSIGN(i32AddrAdj, u32Src);
4798 IEM_MC_AND_ARG_U32(u32Src, 0x1f);
4799 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
4800 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
4801 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
4802 IEM_MC_FETCH_EFLAGS(EFlags);
4803
4804 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
4805 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
4806 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
4807 else
4808 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
4809 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
4810
4811 IEM_MC_COMMIT_EFLAGS(EFlags);
4812 IEM_MC_ADVANCE_RIP();
4813 IEM_MC_END();
4814 return VINF_SUCCESS;
4815
4816 case IEMMODE_64BIT:
4817 IEM_MC_BEGIN(3, 2);
4818 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
4819 IEM_MC_ARG(uint64_t, u64Src, 1);
4820 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
4821 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4822 IEM_MC_LOCAL(int64_t, i64AddrAdj);
4823
4824 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4825 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4826 IEM_MC_ASSIGN(i64AddrAdj, u64Src);
4827 IEM_MC_AND_ARG_U64(u64Src, 0x3f);
4828 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
4829 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
4830 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
4831 IEM_MC_FETCH_EFLAGS(EFlags);
4832
4833 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
4834 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
4835 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
4836 else
4837 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
4838 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
4839
4840 IEM_MC_COMMIT_EFLAGS(EFlags);
4841 IEM_MC_ADVANCE_RIP();
4842 IEM_MC_END();
4843 return VINF_SUCCESS;
4844
4845 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4846 }
4847 }
4848}
4849
4850
4851/** Opcode 0x0f 0xa3. */
4852FNIEMOP_DEF(iemOp_bt_Ev_Gv)
4853{
4854 IEMOP_MNEMONIC("bt Gv,Gv");
4855 IEMOP_HLP_MIN_386();
4856 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
4857}
4858
4859
/**
 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
 *
 * Decodes SHLD/SHRD r/m, reg, imm8: destination is r/m, fill bits come from
 * the reg operand, and the shift count is an immediate byte.  AF and OF are
 * declared undefined for verification purposes.
 *
 * @param   pImpl   The shift-double implementation table (shld or shrd).
 */
FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register destination: the immediate follows modrm directly. */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEMOP_HLP_NO_LOCK_PREFIX();

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                0);
                IEM_MC_ARG(uint16_t,        u16Src,                 1);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, /*=*/cShift, 2);
                IEM_MC_ARG(uint32_t *,      pEFlags,                3);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                0);
                IEM_MC_ARG(uint32_t,        u32Src,                 1);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, /*=*/cShift, 2);
                IEM_MC_ARG(uint32_t *,      pEFlags,                3);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                /* 32-bit register writes zero the high dword in 64-bit mode. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                0);
                IEM_MC_ARG(uint64_t,        u64Src,                 1);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, /*=*/cShift, 2);
                IEM_MC_ARG(uint32_t *,      pEFlags,                3);

                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

        /* Memory destination: the effective-address calc is told about the
           trailing byte (the 1 argument - NOTE(review): presumably the count
           of opcode bytes still to be fetched, needed for RIP-relative
           addressing; confirm against IEM_MC_CALC_RM_EFF_ADDR), then the
           immediate is fetched. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint16_t *,              pu16Dst,                0);
                IEM_MC_ARG(uint16_t,                u16Src,                 1);
                IEM_MC_ARG(uint8_t,                 cShiftArg,              2);
                IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags,        3);
                IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint32_t *,              pu32Dst,                0);
                IEM_MC_ARG(uint32_t,                u32Src,                 1);
                IEM_MC_ARG(uint8_t,                 cShiftArg,              2);
                IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags,        3);
                IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint64_t *,              pu64Dst,                0);
                IEM_MC_ARG(uint64_t,                u64Src,                 1);
                IEM_MC_ARG(uint8_t,                 cShiftArg,              2);
                IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags,        3);
                IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
5004
5005
5006/**
5007 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
5008 */
5009FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
5010{
5011 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5012 IEMOP_HLP_NO_LOCK_PREFIX();
5013 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
5014
5015 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5016 {
5017 IEMOP_HLP_NO_LOCK_PREFIX();
5018
5019 switch (pIemCpu->enmEffOpSize)
5020 {
5021 case IEMMODE_16BIT:
5022 IEM_MC_BEGIN(4, 0);
5023 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5024 IEM_MC_ARG(uint16_t, u16Src, 1);
5025 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5026 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5027
5028 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
5029 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
5030 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5031 IEM_MC_REF_EFLAGS(pEFlags);
5032 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5033
5034 IEM_MC_ADVANCE_RIP();
5035 IEM_MC_END();
5036 return VINF_SUCCESS;
5037
5038 case IEMMODE_32BIT:
5039 IEM_MC_BEGIN(4, 0);
5040 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5041 IEM_MC_ARG(uint32_t, u32Src, 1);
5042 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5043 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5044
5045 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
5046 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
5047 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5048 IEM_MC_REF_EFLAGS(pEFlags);
5049 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5050
5051 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5052 IEM_MC_ADVANCE_RIP();
5053 IEM_MC_END();
5054 return VINF_SUCCESS;
5055
5056 case IEMMODE_64BIT:
5057 IEM_MC_BEGIN(4, 0);
5058 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5059 IEM_MC_ARG(uint64_t, u64Src, 1);
5060 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5061 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5062
5063 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
5064 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
5065 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5066 IEM_MC_REF_EFLAGS(pEFlags);
5067 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5068
5069 IEM_MC_ADVANCE_RIP();
5070 IEM_MC_END();
5071 return VINF_SUCCESS;
5072
5073 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5074 }
5075 }
5076 else
5077 {
5078 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */
5079
5080 switch (pIemCpu->enmEffOpSize)
5081 {
5082 case IEMMODE_16BIT:
5083 IEM_MC_BEGIN(4, 2);
5084 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5085 IEM_MC_ARG(uint16_t, u16Src, 1);
5086 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5087 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5088 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5089
5090 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5091 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
5092 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5093 IEM_MC_FETCH_EFLAGS(EFlags);
5094 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
5095 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5096
5097 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
5098 IEM_MC_COMMIT_EFLAGS(EFlags);
5099 IEM_MC_ADVANCE_RIP();
5100 IEM_MC_END();
5101 return VINF_SUCCESS;
5102
5103 case IEMMODE_32BIT:
5104 IEM_MC_BEGIN(4, 2);
5105 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5106 IEM_MC_ARG(uint32_t, u32Src, 1);
5107 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5108 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5109 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5110
5111 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5112 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
5113 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5114 IEM_MC_FETCH_EFLAGS(EFlags);
5115 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
5116 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5117
5118 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
5119 IEM_MC_COMMIT_EFLAGS(EFlags);
5120 IEM_MC_ADVANCE_RIP();
5121 IEM_MC_END();
5122 return VINF_SUCCESS;
5123
5124 case IEMMODE_64BIT:
5125 IEM_MC_BEGIN(4, 2);
5126 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5127 IEM_MC_ARG(uint64_t, u64Src, 1);
5128 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5129 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5130 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5131
5132 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5133 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
5134 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5135 IEM_MC_FETCH_EFLAGS(EFlags);
5136 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
5137 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5138
5139 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
5140 IEM_MC_COMMIT_EFLAGS(EFlags);
5141 IEM_MC_ADVANCE_RIP();
5142 IEM_MC_END();
5143 return VINF_SUCCESS;
5144
5145 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5146 }
5147 }
5148}
5149
5150
5151
/** Opcode 0x0f 0xa4. */
FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
{
    /* SHLD Ev,Gv,Ib - double precision shift left, count taken from an
       immediate byte.  Decoding/emulation is delegated to the common
       SHLD/SHRD immediate worker; only the assembly impl table differs. */
    IEMOP_MNEMONIC("shld Ev,Gv,Ib");
    IEMOP_HLP_MIN_386(); /* SHLD first appeared on the 80386. */
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shld);
}
5159
5160
/** Opcode 0x0f 0xa5. */
FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
{
    /* SHLD Ev,Gv,CL - double precision shift left, count taken from CL.
       Delegated to the common SHLD/SHRD CL worker with the SHLD impl table. */
    IEMOP_MNEMONIC("shld Ev,Gv,CL");
    IEMOP_HLP_MIN_386(); /* SHLD first appeared on the 80386. */
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shld);
}
5168
5169
/** Opcode 0x0f 0xa8. */
FNIEMOP_DEF(iemOp_push_gs)
{
    /* PUSH GS - push the GS segment register, via the common segment-push
       worker shared by all PUSH Sreg encodings. */
    IEMOP_MNEMONIC("push gs");
    IEMOP_HLP_MIN_386(); /* GS only exists on the 80386 and later. */
    IEMOP_HLP_NO_LOCK_PREFIX();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
}
5178
5179
/** Opcode 0x0f 0xa9. */
FNIEMOP_DEF(iemOp_pop_gs)
{
    /* POP GS - segment register loads have complex fault semantics, so this
       is deferred to the C implementation (iemCImpl_pop_Sreg) with the
       current effective operand size. */
    IEMOP_MNEMONIC("pop gs");
    IEMOP_HLP_MIN_386(); /* GS only exists on the 80386 and later. */
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pIemCpu->enmEffOpSize);
}
5188
5189
/** Opcode 0x0f 0xaa. */
/* RSM (resume from System Management Mode) is not implemented yet; the
   FNIEMOP_STUB macro presumably generates a placeholder handler.  The
   commented-out line records the intended minimum-CPU check. */
FNIEMOP_STUB(iemOp_rsm);
//IEMOP_HLP_MIN_386();
5193
5194
/** Opcode 0x0f 0xab. */
FNIEMOP_DEF(iemOp_bts_Ev_Gv)
{
    /* BTS Ev,Gv - bit test and set.  Delegated to the common bit-op worker
       with the BTS assembly impl table. */
    IEMOP_MNEMONIC("bts Ev,Gv");
    IEMOP_HLP_MIN_386(); /* BT/BTS/BTR/BTC first appeared on the 80386. */
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
}
5202
5203
/** Opcode 0x0f 0xac. */
FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
{
    /* SHRD Ev,Gv,Ib - double precision shift right, count taken from an
       immediate byte.  Same common worker as SHLD, different impl table. */
    IEMOP_MNEMONIC("shrd Ev,Gv,Ib");
    IEMOP_HLP_MIN_386(); /* SHRD first appeared on the 80386. */
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shrd);
}
5211
5212
/** Opcode 0x0f 0xad. */
FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
{
    /* SHRD Ev,Gv,CL - double precision shift right, count taken from CL. */
    IEMOP_MNEMONIC("shrd Ev,Gv,CL");
    IEMOP_HLP_MIN_386(); /* SHRD first appeared on the 80386. */
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shrd);
}
5220
5221
/** Opcode 0x0f 0xae mem/0. */
FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
{
    /*
     * FXSAVE m512 - save x87/MMX/SSE state to a 512-byte memory area.
     * Raises \#UD when the guest CPU profile lacks FXSAVE/FXRSTOR support;
     * the heavy lifting is done by the C implementation (iemCImpl_fxsave),
     * which gets the effective segment, address and operand size.
     */
    IEMOP_MNEMONIC("fxsave m512");
    if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fFxSaveRstor)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 2);
    /* Effective address must be decoded before declaring decoding done. */
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
    IEM_MC_END();
    return VINF_SUCCESS;
}
5240
5241
/** Opcode 0x0f 0xae mem/1. */
FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
{
    /*
     * FXRSTOR m512 - restore x87/MMX/SSE state from a 512-byte memory area.
     * Mirror image of iemOp_Grp15_fxsave above; deferred to iemCImpl_fxrstor.
     */
    IEMOP_MNEMONIC("fxrstor m512");
    if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fFxSaveRstor)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 2);
    /* Effective address must be decoded before declaring decoding done. */
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
    IEM_MC_END();
    return VINF_SUCCESS;
}
5260
5261
/* Remaining group 15 memory-form encodings, not implemented yet.
   FNIEMOP_STUB_1 generates a placeholder handler; FNIEMOP_UD_STUB_1
   presumably raises \#UD (used here for the XSAVE family, which this
   CPU profile does not advertise). */

/** Opcode 0x0f 0xae mem/2. */
FNIEMOP_STUB_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/3. */
FNIEMOP_STUB_1(iemOp_Grp15_stmxcsr, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/4. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_xsave, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/5. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_xrstor, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/6. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/7. */
FNIEMOP_STUB_1(iemOp_Grp15_clflush, uint8_t, bRm);
5279
5280
/** Opcode 0x0f 0xae 11b/5. */
FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
{
    /*
     * LFENCE - load fence.  \#UD unless the guest CPU profile has SSE2
     * (LFENCE was introduced with SSE2).  If the host CPU itself lacks
     * SSE2 the assembly helper falls back to an alternative memory fence.
     */
    IEMOP_MNEMONIC("lfence");
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fSse2)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(0, 0);
    if (IEM_GET_HOST_CPU_FEATURES(pIemCpu)->fSse2)
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
5298
5299
/** Opcode 0x0f 0xae 11b/6. */
FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
{
    /*
     * MFENCE - full memory fence.  \#UD unless the guest CPU profile has
     * SSE2 (MFENCE was introduced with SSE2).  Falls back to an alternative
     * fence helper when the host CPU lacks SSE2.
     */
    IEMOP_MNEMONIC("mfence");
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fSse2)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(0, 0);
    if (IEM_GET_HOST_CPU_FEATURES(pIemCpu)->fSse2)
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
5317
5318
/** Opcode 0x0f 0xae 11b/7. */
FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
{
    /*
     * SFENCE - store fence.  Falls back to an alternative fence helper when
     * the host CPU lacks SSE2.
     *
     * NOTE(review): on real hardware SFENCE was introduced with SSE, not
     * SSE2, so gating the guest on fSse2 may be stricter than necessary -
     * confirm whether this is intentional.
     */
    IEMOP_MNEMONIC("sfence");
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fSse2)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(0, 0);
    if (IEM_GET_HOST_CPU_FEATURES(pIemCpu)->fSse2)
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
5336
5337
/* F3-prefixed group 15 register-form encodings (FSGSBASE family), not
   implemented: the UD stubs presumably raise \#UD. */

/** Opcode 0xf3 0x0f 0xae 11b/0. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_rdfsbase, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xae 11b/1. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_rdgsbase, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xae 11b/2. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_wrfsbase, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xae 11b/3. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_wrgsbase, uint8_t, bRm);
5349
5350
/** Opcode 0x0f 0xae. */
FNIEMOP_DEF(iemOp_Grp15)
{
    /*
     * Group 15 dispatcher.  The memory forms (mod != 3) select
     * fxsave/fxrstor/ldmxcsr/stmxcsr/xsave/xrstor/xsaveopt/clflush by the
     * ModR/M reg field.  The register forms (mod == 3) are further selected
     * by prefix: no prefix gives the lfence/mfence/sfence encodings
     * (reg 5-7), F3 (REPZ) gives the rd/wr fs/gs base encodings (reg 0-3),
     * and any other repeat/size/lock prefix combination is \#UD.
     */
    IEMOP_HLP_MIN_586(); /* Not entirely accurate nor needed, but useful for debugging 286 code. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_Grp15_fxsave, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_Grp15_fxrstor, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_Grp15_ldmxcsr, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_Grp15_stmxcsr, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_Grp15_xsave, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_Grp15_xrstor, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_Grp15_xsaveopt,bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_Grp15_clflush, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (pIemCpu->fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_LOCK))
        {
            case 0: /* no relevant prefixes: fence instructions. */
                switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
                {
                    case 0: return IEMOP_RAISE_INVALID_OPCODE();
                    case 1: return IEMOP_RAISE_INVALID_OPCODE();
                    case 2: return IEMOP_RAISE_INVALID_OPCODE();
                    case 3: return IEMOP_RAISE_INVALID_OPCODE();
                    case 4: return IEMOP_RAISE_INVALID_OPCODE();
                    case 5: return FNIEMOP_CALL_1(iemOp_Grp15_lfence, bRm);
                    case 6: return FNIEMOP_CALL_1(iemOp_Grp15_mfence, bRm);
                    case 7: return FNIEMOP_CALL_1(iemOp_Grp15_sfence, bRm);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reachable - every case above returns. */

            case IEM_OP_PRF_REPZ: /* F3 prefix: FSGSBASE family. */
                switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
                {
                    case 0: return FNIEMOP_CALL_1(iemOp_Grp15_rdfsbase, bRm);
                    case 1: return FNIEMOP_CALL_1(iemOp_Grp15_rdgsbase, bRm);
                    case 2: return FNIEMOP_CALL_1(iemOp_Grp15_wrfsbase, bRm);
                    case 3: return FNIEMOP_CALL_1(iemOp_Grp15_wrgsbase, bRm);
                    case 4: return IEMOP_RAISE_INVALID_OPCODE();
                    case 5: return IEMOP_RAISE_INVALID_OPCODE();
                    case 6: return IEMOP_RAISE_INVALID_OPCODE();
                    case 7: return IEMOP_RAISE_INVALID_OPCODE();
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reachable - every case above returns. */

            default:
                return IEMOP_RAISE_INVALID_OPCODE();
        }
    }
}
5410
5411
/** Opcode 0x0f 0xaf. */
FNIEMOP_DEF(iemOp_imul_Gv_Ev)
{
    /* IMUL Gv,Ev - two operand signed multiply.  SF/ZF/AF/PF are
       architecturally undefined after IMUL, so they are excluded from
       result verification.  Uses the generic reg,r/m binary-op worker. */
    IEMOP_MNEMONIC("imul Gv,Ev");
    IEMOP_HLP_MIN_386(); /* The two-operand IMUL form first appeared on the 80386. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_imul_two);
}
5420
5421
/** Opcode 0x0f 0xb0. */
FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
{
    /*
     * CMPXCHG Eb,Gb - compare AL with the destination; the assembly helper
     * performs the exchange and flag updates.  The LOCK prefix selects the
     * atomic helper variant.  CMPXCHG first appeared on the 80486.
     */
    IEMOP_MNEMONIC("cmpxchg Eb,Gb");
    IEMOP_HLP_MIN_486();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register destination: operate directly on the guest registers. */
        IEMOP_HLP_DONE_DECODING();
        IEM_MC_BEGIN(4, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t *, pu8Al, 1);
        IEM_MC_ARG(uint8_t, u8Src, 2);
        IEM_MC_ARG(uint32_t *, pEFlags, 3);

        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* Memory destination: map the destination read/write, work on a
           local copy of AL, then commit memory, EFLAGS and AL. */
        IEM_MC_BEGIN(4, 3);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t *, pu8Al, 1);
        IEM_MC_ARG(uint8_t, u8Src, 2);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(uint8_t, u8Al);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING();
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_REF_LOCAL(pu8Al, u8Al);
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        /* AL is written back unconditionally; presumably the helper leaves
           it unchanged on a successful compare, so this is benign. */
        IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5480
/** Opcode 0x0f 0xb1. */
FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
{
    /*
     * CMPXCHG Ev,Gv - word/dword/qword compare-and-exchange against
     * AX/EAX/RAX.  Same structure as the byte variant above, expanded per
     * effective operand size, with a LOCK-prefix selected atomic helper.
     * On RT_ARCH_X86 (32-bit host) the 64-bit source is passed by
     * reference instead of by value - presumably an assembly helper
     * calling-convention restriction; confirm against the impl.
     */
    IEMOP_MNEMONIC("cmpxchg Ev,Gv");
    IEMOP_HLP_MIN_486();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register destination. */
        IEMOP_HLP_DONE_DECODING();
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Ax, 1);
                IEM_MC_ARG(uint16_t, u16Src, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Eax, 1);
                IEM_MC_ARG(uint32_t, u32Src, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);

                /* 32-bit GPR writes clear the upper halves in 64-bit mode. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Rax, 1);
#ifdef RT_ARCH_X86
                IEM_MC_ARG(uint64_t *, pu64Src, 2);
#else
                IEM_MC_ARG(uint64_t, u64Src, 2);
#endif
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
#ifdef RT_ARCH_X86
                IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
#else
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
#endif

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory destination: map the destination read/write, work on a
           local copy of the accumulator, then commit memory, EFLAGS and
           the accumulator. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Ax, 1);
                IEM_MC_ARG(uint16_t, u16Src, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint16_t, u16Ax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Eax, 1);
                IEM_MC_ARG(uint32_t, u32Src, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint32_t, u32Eax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Rax, 1);
#ifdef RT_ARCH_X86
                IEM_MC_ARG(uint64_t *, pu64Src, 2);
#else
                IEM_MC_ARG(uint64_t, u64Src, 2);
#endif
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint64_t, u64Rax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
#ifdef RT_ARCH_X86
                IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
#else
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
#endif

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
5674
5675
/**
 * Common worker for LSS/LFS/LGS (and the older LES/LDS pattern): loads a far
 * pointer from memory into a segment register plus a general register.
 *
 * The offset part is fetched first, then the 16-bit selector that follows it
 * (at +2/+4/+8 depending on operand size); the actual register/segment loads
 * are deferred to iemCImpl_load_SReg_Greg.
 *
 * @param   iSegReg     The segment register to load (X86_SREG_XXX).
 * @param   bRm         The ModR/M byte; must denote a memory operand.
 */
FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
{
    Assert((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)); /* Caller checks this */
    uint8_t const iGReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg;

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t, uSel, 0);
            IEM_MC_ARG(uint16_t, offSeg, 1);
            IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
            IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16(offSeg, pIemCpu->iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pIemCpu->iEffSeg, GCPtrEff, 2); /* selector follows the 16-bit offset. */
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t, uSel, 0);
            IEM_MC_ARG(uint32_t, offSeg, 1);
            IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
            IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32(offSeg, pIemCpu->iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pIemCpu->iEffSeg, GCPtrEff, 4); /* selector follows the 32-bit offset. */
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t, uSel, 0);
            IEM_MC_ARG(uint64_t, offSeg, 1);
            IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
            IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            if (IEM_IS_GUEST_CPU_AMD(pIemCpu)) /** @todo testcase: rev 3.15 of the amd manuals claims it only loads a 32-bit greg. */
                IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pIemCpu->iEffSeg, GCPtrEff);
            else
                IEM_MC_FETCH_MEM_U64(offSeg, pIemCpu->iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pIemCpu->iEffSeg, GCPtrEff, 8); /* selector follows the 64-bit offset. */
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
5737
5738
/** Opcode 0x0f 0xb2. */
FNIEMOP_DEF(iemOp_lss_Gv_Mp)
{
    /* LSS Gv,Mp - load far pointer into SS:Gv.  Register operands are
       invalid for the far-pointer load instructions. */
    IEMOP_MNEMONIC("lss Gv,Mp");
    IEMOP_HLP_MIN_386(); /* LSS first appeared on the 80386. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE();
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
}
5749
5750
5751/** Opcode 0x0f 0xb3. */
5752FNIEMOP_DEF(iemOp_btr_Ev_Gv)
5753{
5754 IEMOP_MNEMONIC("btr Ev,Gv");
5755 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
5756}
5757
5758
/** Opcode 0x0f 0xb4. */
FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
{
    /* LFS Gv,Mp - load far pointer into FS:Gv.  Register operands are
       invalid for the far-pointer load instructions. */
    IEMOP_MNEMONIC("lfs Gv,Mp");
    IEMOP_HLP_MIN_386(); /* LFS (and FS itself) first appeared on the 80386. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE();
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
}
5769
5770
/** Opcode 0x0f 0xb5. */
FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
{
    /* LGS Gv,Mp - load far pointer into GS:Gv.  Register operands are
       invalid for the far-pointer load instructions. */
    IEMOP_MNEMONIC("lgs Gv,Mp");
    IEMOP_HLP_MIN_386(); /* LGS (and GS itself) first appeared on the 80386. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE();
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
}
5781
5782
/** Opcode 0x0f 0xb6. */
FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
{
    /*
     * MOVZX Gv,Eb - zero-extend a byte register or memory operand into a
     * 16/32/64-bit destination register, expanded per effective operand
     * size for both the register and memory source forms.
     */
    IEMOP_MNEMONIC("movzx Gv,Eb");
    IEMOP_HLP_MIN_386(); /* MOVZX first appeared on the 80386. */

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
5873
5874
/** Opcode 0x0f 0xb7. */
FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
{
    /*
     * MOVZX Gv,Ew - zero-extend a word register or memory operand into a
     * 32/64-bit destination register.  Only two paths exist: 64-bit
     * operand size, and everything else handled as a 32-bit store (see the
     * operand-size @todo below).
     */
    IEMOP_MNEMONIC("movzx Gv,Ew");
    IEMOP_HLP_MIN_386(); /* MOVZX first appeared on the 80386. */

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /** @todo Not entirely sure how the operand size prefix is handled here,
     *        assuming that it will be ignored. Would be nice to have a few
     *        test for this. */
    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}
5941
5942
/** Opcode 0x0f 0xb8. */
/* POPCNT Gv,Ev (with F3 prefix) / JMPE (IA-64) - not implemented yet. */
FNIEMOP_STUB(iemOp_popcnt_Gv_Ev_jmpe);
5945
5946
/** Opcode 0x0f 0xb9. */
FNIEMOP_DEF(iemOp_Grp10)
{
    /* Group 10 (UD1) - always raises an invalid-opcode exception; logged
       to aid debugging guests that hit it. */
    Log(("iemOp_Grp10 -> #UD\n"));
    return IEMOP_RAISE_INVALID_OPCODE();
}
5953
5954
/** Opcode 0x0f 0xba. */
FNIEMOP_DEF(iemOp_Grp8)
{
    /*
     * Group 8: BT/BTS/BTR/BTC Ev,Ib - bit test/modify with an immediate bit
     * offset.  The ModR/M reg field selects which of the four operations to
     * perform; /0../3 are reserved encodings.
     */
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPBINSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: case 1: case 2: case 3:
            return IEMOP_RAISE_INVALID_OPCODE();
        case 4: pImpl = &g_iemAImpl_bt; IEMOP_MNEMONIC("bt Ev,Ib"); break;
        case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC("bts Ev,Ib"); break;
        case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC("btr Ev,Ib"); break;
        case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC("btc Ev,Ib"); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register destination. */
        uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
        IEMOP_HLP_NO_LOCK_PREFIX();

        /* The immediate bit offset is masked to the operand width (0x0f/0x1f/0x3f)
           before being passed to the assembly worker. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u8Bit & 0x0f, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u8Bit & 0x1f, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                /* 32-bit GPR writes zero the upper half in 64-bit mode. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u8Bit & 0x3f, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory destination. */

        /* BT only reads the destination, so it has no locked worker and the
           LOCK prefix is rejected for it; BTS/BTR/BTC map read-write. */
        uint32_t fAccess;
        if (pImpl->pfnLockedU16)
            fAccess = IEM_ACCESS_DATA_RW;
        else /* BT */
        {
            IEMOP_HLP_NO_LOCK_PREFIX();
            fAccess = IEM_ACCESS_DATA_R;
        }

        /** @todo test negative bit offsets! */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* The trailing 1 tells the effective address calculation that one
                   immediate byte (the bit offset) still follows. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
                IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
                IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
                IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

}
6117
6118
/** Opcode 0x0f 0xbb. */
FNIEMOP_DEF(iemOp_btc_Ev_Gv)
{
    /* BTC Ev,Gv - bit test and complement; shares the common bit-op decoder. */
    IEMOP_MNEMONIC("btc Ev,Gv");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
}
6126
6127
/** Opcode 0x0f 0xbc. */
FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
{
    /* BSF Gv,Ev - bit scan forward; only ZF is defined, the rest is undefined. */
    IEMOP_MNEMONIC("bsf Gv,Ev");
    IEMOP_HLP_MIN_386();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsf);
}
6136
6137
/** Opcode 0x0f 0xbd. */
FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
{
    /* BSR Gv,Ev - bit scan reverse; only ZF is defined, the rest is undefined. */
    IEMOP_MNEMONIC("bsr Gv,Ev");
    IEMOP_HLP_MIN_386();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsr);
}
6146
6147
/** Opcode 0x0f 0xbe. */
FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
{
    /*
     * MOVSX Gv,Eb - sign-extend a byte register/memory operand into a
     * 16/32/64-bit general register selected by ModR/M.reg (+REX.R).
     */
    IEMOP_MNEMONIC("movsx Gv,Eb");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         * (Note: GCPtrEffDst is the *source* address here, the name notwithstanding.)
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6238
6239
/** Opcode 0x0f 0xbf. */
FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
{
    /*
     * MOVSX Gv,Ew - sign-extend a word register/memory operand into a 32- or
     * 64-bit general register.  16-bit operand size collapses into the 32-bit
     * path (see the todo below).
     */
    IEMOP_MNEMONIC("movsx Gv,Ew");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /** @todo Not entirely sure how the operand size prefix is handled here,
     *        assuming that it will be ignored. Would be nice to have a few
     *        test for this. */
    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         * (GCPtrEffDst is the load address despite the "Dst" in the name.)
         */
        if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}
6306
6307
/** Opcode 0x0f 0xc0. */
FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
{
    /*
     * XADD Eb,Gb - exchange and add (486+).  The worker adds Gb into Eb and
     * leaves the original Eb value in Gb; for the memory form the register
     * write-back happens after the memory commit via u8RegCopy.
     */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_MIN_486();
    IEMOP_MNEMONIC("xadd Eb,Gb");

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t *, pu8Reg, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        /* falls through to the common return below */
    }
    else
    {
        /*
         * We're accessing memory.
         */
        IEM_MC_BEGIN(3, 3);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t *, pu8Reg, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(uint8_t, u8RegCopy);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U8(u8RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
        IEM_MC_FETCH_EFLAGS(EFlags);
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u8RegCopy);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }
    return VINF_SUCCESS;
}
6366
6367
/** Opcode 0x0f 0xc1. */
FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
{
    /*
     * XADD Ev,Gv - exchange and add (486+), word/dword/qword variants.
     * Memory form stashes the original Gv in a local copy and writes it back
     * to the register after the memory operand has been committed.
     */
    IEMOP_MNEMONIC("xadd Ev,Gv");
    IEMOP_HLP_MIN_486();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX();

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Reg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Reg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);

                /* Both operands are registers written as 32-bit, so both get
                   their upper halves zeroed in 64-bit mode. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Reg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Reg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16RegCopy);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U16(u16RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Reg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32RegCopy);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U32(u32RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Reg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64RegCopy);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U64(u64RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6520
/** Opcode 0x0f 0xc2 - CMPPS/CMPPD/CMPSS/CMPSD (form selected by prefix).
 *  Not implemented yet. */
FNIEMOP_STUB(iemOp_cmpps_Vps_Wps_Ib__cmppd_Vpd_Wpd_Ib__cmpss_Vss_Wss_Ib__cmpsd_Vsd_Wsd_Ib);
6523
6524
/** Opcode 0x0f 0xc3. */
#if 1 //ndef VBOX_WITH_REM /** @todo figure out why some/all of these instructions is upsetting things */
FNIEMOP_DEF(iemOp_movnti_My_Gy)
{
    /*
     * MOVNTI My,Gy - non-temporal store of a general register to memory
     * (SSE2).  Emulated here as a plain store; the non-temporal hint has no
     * architectural effect on results.
     */
    IEMOP_MNEMONIC("movnti My,Gy");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /* Only the register -> memory form makes sense, assuming #UD for the other form. */
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                /* SSE2 check done after full decoding so #UD priorities come out right. */
                if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fSse2)
                    return IEMOP_RAISE_INVALID_OPCODE();

                IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrEffDst, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fSse2)
                    return IEMOP_RAISE_INVALID_OPCODE();

                IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffDst, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_16BIT:
                /** @todo check this form.   */
                return IEMOP_RAISE_INVALID_OPCODE();
        }
    }
    else
        return IEMOP_RAISE_INVALID_OPCODE();
    return VINF_SUCCESS;
}
#else
FNIEMOP_STUB(iemOp_movnti_My_Gy); // solaris 10 uses this in hat_pte_zero().
#endif
6582
6583
/* Unimplemented SSE/MMX decode stubs for 0x0f 0xc4..0xc6. */

/** Opcode 0x0f 0xc4 - PINSRW. */
FNIEMOP_STUB(iemOp_pinsrw_Pq_Ry_Mw_Ib__pinsrw_Vdq_Ry_Mw_Ib);

/** Opcode 0x0f 0xc5 - PEXTRW. */
FNIEMOP_STUB(iemOp_pextrw_Gd_Nq_Ib__pextrw_Gd_Udq_Ib);

/** Opcode 0x0f 0xc6 - SHUFPS/SHUFPD. */
FNIEMOP_STUB(iemOp_shufps_Vps_Wps_Ib__shufdp_Vpd_Wpd_Ib);
6592
6593
/** Opcode 0x0f 0xc7 !11/1. */
FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
{
    /*
     * CMPXCHG8B Mq - compare EDX:EAX with m64; the assembly worker stores
     * ECX:EBX on match (setting ZF), otherwise leaves the comparand value in
     * the EDX:EAX pair which is written back below when ZF is clear.
     */
    IEMOP_MNEMONIC("cmpxchg8b Mq");

    IEM_MC_BEGIN(4, 3);
    IEM_MC_ARG(uint64_t *, pu64MemDst, 0);
    IEM_MC_ARG(PRTUINT64U, pu64EaxEdx, 1);
    IEM_MC_ARG(PRTUINT64U, pu64EbxEcx, 2);
    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
    IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
    IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);

    IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
    IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
    IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);

    IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
    IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
    IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);

    IEM_MC_FETCH_EFLAGS(EFlags);
    if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
    else
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);

    IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
    IEM_MC_COMMIT_EFLAGS(EFlags);
    IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
        /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */
        IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
        IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
6638
6639
/* Remaining Group 9 sub-encodings are not implemented; these stubs decode to \#UD. */

/** Opcode REX.W 0x0f 0xc7 !11/1. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm);

/** Opcode 0x0f 0xc7 11/6. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm);

/** Opcode 0x0f 0xc7 !11/6. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);

/** Opcode 0x66 0x0f 0xc7 !11/6. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xc7 !11/6. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);

/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
6657
6658
6659/** Opcode 0x0f 0xc7. */
6660FNIEMOP_DEF(iemOp_Grp9)
6661{
6662 /** @todo Testcase: Check mixing 0x66 and 0xf3. Check the effect of 0xf2. */
6663 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6664 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
6665 {
6666 case 0: case 2: case 3: case 4: case 5:
6667 return IEMOP_RAISE_INVALID_OPCODE();
6668 case 1:
6669 /** @todo Testcase: Check prefix effects on cmpxchg8b/16b. */
6670 if ( (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)
6671 || (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))) /** @todo Testcase: AMD seems to express a different idea here wrt prefixes. */
6672 return IEMOP_RAISE_INVALID_OPCODE();
6673 if (bRm & IEM_OP_PRF_SIZE_REX_W)
6674 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
6675 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
6676 case 6:
6677 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6678 return FNIEMOP_CALL_1(iemOp_Grp9_rdrand_Rv, bRm);
6679 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))
6680 {
6681 case 0:
6682 return FNIEMOP_CALL_1(iemOp_Grp9_vmptrld_Mq, bRm);
6683 case IEM_OP_PRF_SIZE_OP:
6684 return FNIEMOP_CALL_1(iemOp_Grp9_vmclear_Mq, bRm);
6685 case IEM_OP_PRF_REPZ:
6686 return FNIEMOP_CALL_1(iemOp_Grp9_vmxon_Mq, bRm);
6687 default:
6688 return IEMOP_RAISE_INVALID_OPCODE();
6689 }
6690 case 7:
6691 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))
6692 {
6693 case 0:
6694 case IEM_OP_PRF_REPZ:
6695 return FNIEMOP_CALL_1(iemOp_Grp9_vmptrst_Mq, bRm);
6696 default:
6697 return IEMOP_RAISE_INVALID_OPCODE();
6698 }
6699 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6700 }
6701}
6702
6703
/**
 * Common 'bswap register' helper.
 *
 * Byte-swaps the general register given by @a iReg according to the current
 * effective operand size.  The register index is expected to already include
 * any REX.B extension (see callers).
 */
FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            /* 16-bit BSWAP is undefined behaviour per the SDM; this delegates
               to the dedicated u16 worker via a 32-bit register reference. */
            IEM_MC_BEGIN(1, 0);
            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
            IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(1, 0);
            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
            IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(1, 0);
            IEM_MC_ARG(uint64_t *, pu64Dst, 0);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6743
6744
/** Opcode 0x0f 0xc8. */
FNIEMOP_DEF(iemOp_bswap_rAX_r8)
{
    /* BSWAP rAX/r8 (486+). */
    IEMOP_MNEMONIC("bswap rAX/r8");
    /* Note! Intel manuals states that R8-R15 can be accessed by using a REX.X
             prefix.  REX.B is the correct prefix it appears.  For a parallel
             case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pIemCpu->uRexB);
}
6755
6756
/** Opcode 0x0f 0xc9. */
FNIEMOP_DEF(iemOp_bswap_rCX_r9)
{
    /* BSWAP rCX/r9 (486+). */
    IEMOP_MNEMONIC("bswap rCX/r9");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pIemCpu->uRexB);
}
6764
6765
6766/** Opcode 0x0f 0xca. */
6767FNIEMOP_DEF(iemOp_bswap_rDX_r10)
6768{
6769 IEMOP_MNEMONIC("bswap rDX/r9");
6770 IEMOP_HLP_MIN_486();
6771 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pIemCpu->uRexB);
6772}
6773
6774
6775/** Opcode 0x0f 0xcb. */
6776FNIEMOP_DEF(iemOp_bswap_rBX_r11)
6777{
6778 IEMOP_MNEMONIC("bswap rBX/r9");
6779 IEMOP_HLP_MIN_486();
6780 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pIemCpu->uRexB);
6781}
6782
6783
/** Opcode 0x0f 0xcc. */
FNIEMOP_DEF(iemOp_bswap_rSP_r12)
{
    /* BSWAP rSP/r12 (486+). */
    IEMOP_MNEMONIC("bswap rSP/r12");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pIemCpu->uRexB);
}
6791
6792
/** Opcode 0x0f 0xcd. */
FNIEMOP_DEF(iemOp_bswap_rBP_r13)
{
    /* BSWAP rBP/r13 (486+). */
    IEMOP_MNEMONIC("bswap rBP/r13");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pIemCpu->uRexB);
}
6800
6801
/** Opcode 0x0f 0xce. */
FNIEMOP_DEF(iemOp_bswap_rSI_r14)
{
    /* BSWAP rSI/r14 (486+). */
    IEMOP_MNEMONIC("bswap rSI/r14");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pIemCpu->uRexB);
}
6809
6810
/** Opcode 0x0f 0xcf. */
FNIEMOP_DEF(iemOp_bswap_rDI_r15)
{
    /* BSWAP rDI/r15 (486+). */
    IEMOP_MNEMONIC("bswap rDI/r15");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pIemCpu->uRexB);
}
6818
6819
6820
/* Unimplemented MMX/SSE decode stubs for 0x0f 0xd0..0xd6. */

/** Opcode 0x0f 0xd0 - ADDSUBPD/ADDSUBPS. */
FNIEMOP_STUB(iemOp_addsubpd_Vpd_Wpd__addsubps_Vps_Wps);
/** Opcode 0x0f 0xd1 - PSRLW. */
FNIEMOP_STUB(iemOp_psrlw_Pp_Qp__psrlw_Vdp_Wdq);
/** Opcode 0x0f 0xd2 - PSRLD. */
FNIEMOP_STUB(iemOp_psrld_Pq_Qq__psrld_Vdq_Wdq);
/** Opcode 0x0f 0xd3 - PSRLQ. */
FNIEMOP_STUB(iemOp_psrlq_Pq_Qq__psrlq_Vdq_Wdq);
/** Opcode 0x0f 0xd4 - PADDQ. */
FNIEMOP_STUB(iemOp_paddq_Pq_Qq__paddq_Vdq_Wdq);
/** Opcode 0x0f 0xd5 - PMULLW. */
FNIEMOP_STUB(iemOp_pmulq_Pq_Qq__pmullw_Vdq_Wdq);
/** Opcode 0x0f 0xd6 - MOVQ/MOVQ2DQ/MOVDQ2Q. */
FNIEMOP_STUB(iemOp_movq_Wq_Vq__movq2dq_Vdq_Nq__movdq2q_Pq_Uq);
6835
6836
6837/** Opcode 0x0f 0xd7. */
6838FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq__pmovmskb_Gd_Udq)
6839{
6840 /* Docs says register only. */
6841 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6842 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
6843 return IEMOP_RAISE_INVALID_OPCODE();
6844
6845 /* Note! Taking the lazy approch here wrt the high 32-bits of the GREG. */
6846 /** @todo testcase: Check that the instruction implicitly clears the high
6847 * bits in 64-bit mode. The REX.W is first necessary when VLMAX > 256
6848 * and opcode modifications are made to work with the whole width (not
6849 * just 128). */
6850 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
6851 {
6852 case IEM_OP_PRF_SIZE_OP: /* SSE */
6853 IEMOP_MNEMONIC("pmovmskb Gd,Nq");
6854 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
6855 IEM_MC_BEGIN(2, 0);
6856 IEM_MC_ARG(uint64_t *, pDst, 0);
6857 IEM_MC_ARG(uint128_t const *, pSrc, 1);
6858 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6859 IEM_MC_PREPARE_SSE_USAGE();
6860 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
6861 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
6862 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
6863 IEM_MC_ADVANCE_RIP();
6864 IEM_MC_END();
6865 return VINF_SUCCESS;
6866
6867 case 0: /* MMX */
6868 IEMOP_MNEMONIC("pmovmskb Gd,Udq");
6869 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
6870 IEM_MC_BEGIN(2, 0);
6871 IEM_MC_ARG(uint64_t *, pDst, 0);
6872 IEM_MC_ARG(uint64_t const *, pSrc, 1);
6873 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
6874 IEM_MC_PREPARE_FPU_USAGE();
6875 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
6876 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
6877 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
6878 IEM_MC_ADVANCE_RIP();
6879 IEM_MC_END();
6880 return VINF_SUCCESS;
6881
6882 default:
6883 return IEMOP_RAISE_INVALID_OPCODE();
6884 }
6885}
6886
6887
/* Unimplemented MMX/SSE decode stubs for 0x0f 0xd8..0xe6. */

/** Opcode 0x0f 0xd8 - PSUBUSB. */
FNIEMOP_STUB(iemOp_psubusb_Pq_Qq__psubusb_Vdq_Wdq);
/** Opcode 0x0f 0xd9 - PSUBUSW. */
FNIEMOP_STUB(iemOp_psubusw_Pq_Qq__psubusw_Vdq_Wdq);
/** Opcode 0x0f 0xda - PMINUB. */
FNIEMOP_STUB(iemOp_pminub_Pq_Qq__pminub_Vdq_Wdq);
/** Opcode 0x0f 0xdb - PAND. */
FNIEMOP_STUB(iemOp_pand_Pq_Qq__pand_Vdq_Wdq);
/** Opcode 0x0f 0xdc - PADDUSB. */
FNIEMOP_STUB(iemOp_paddusb_Pq_Qq__paddusb_Vdq_Wdq);
/** Opcode 0x0f 0xdd - PADDUSW. */
FNIEMOP_STUB(iemOp_paddusw_Pq_Qq__paddusw_Vdq_Wdq);
/** Opcode 0x0f 0xde - PMAXUB. */
FNIEMOP_STUB(iemOp_pmaxub_Pq_Qq__pamxub_Vdq_Wdq);
/** Opcode 0x0f 0xdf - PANDN. */
FNIEMOP_STUB(iemOp_pandn_Pq_Qq__pandn_Vdq_Wdq);
/** Opcode 0x0f 0xe0 - PAVGB. */
FNIEMOP_STUB(iemOp_pavgb_Pq_Qq__pavgb_Vdq_Wdq);
/** Opcode 0x0f 0xe1 - PSRAW. */
FNIEMOP_STUB(iemOp_psraw_Pq_Qq__psraw_Vdq_Wdq);
/** Opcode 0x0f 0xe2 - PSRAD. */
FNIEMOP_STUB(iemOp_psrad_Pq_Qq__psrad_Vdq_Wdq);
/** Opcode 0x0f 0xe3 - PAVGW. */
FNIEMOP_STUB(iemOp_pavgw_Pq_Qq__pavgw_Vdq_Wdq);
/** Opcode 0x0f 0xe4 - PMULHUW. */
FNIEMOP_STUB(iemOp_pmulhuw_Pq_Qq__pmulhuw_Vdq_Wdq);
/** Opcode 0x0f 0xe5 - PMULHW. */
FNIEMOP_STUB(iemOp_pmulhw_Pq_Qq__pmulhw_Vdq_Wdq);
/** Opcode 0x0f 0xe6 - CVTTPD2DQ/CVTDQ2PD/CVTPD2DQ. */
FNIEMOP_STUB(iemOp_cvttpd2dq_Vdq_Wdp__cvtdq2pd_Vdq_Wpd__cvtpd2dq_Vdq_Wpd);
6918
6919
/** Opcode 0x0f 0xe7. */
#if 1 //ndef VBOX_WITH_REM /** @todo figure out why some/all of these instructions is upsetting things */
/**
 * MOVNTQ Mq,Pq (MMX) / MOVNTDQ Mdq,Vdq (SSE2, 0x66 prefix) - non-temporal
 * store of an MMX or XMM register to memory.
 *
 * The non-temporal cache hint is not modeled here; a plain (for MOVNTDQ,
 * alignment-checked) memory store is performed instead.  The
 * register,register encoding is invalid for both forms and raises \#UD.
 */
FNIEMOP_DEF(iemOp_movntq_Mq_Pq__movntdq_Mdq_Vdq)
{
    /* Mnemonic depends on the operand-size prefix: 0x66 selects MOVNTDQ. */
    IEMOP_MNEMONIC(!(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP) ? "movntq mr,r" : "movntdq mr,r");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, memory.
         */
/** @todo check when the REPNZ/Z bits kick in. Same as lock, probably... */
        switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
        {

            case IEM_OP_PRF_SIZE_OP: /* SSE: MOVNTDQ - store the full 128-bit XMM register. */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint128_t, uSrc);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

                IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                /* Aligned store: MOVNTDQ requires a 16-byte aligned destination. */
                IEM_MC_STORE_MEM_U128_ALIGN_SSE(pIemCpu->iEffSeg, GCPtrEffSrc, uSrc);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case 0: /* MMX: MOVNTQ - store the 64-bit MMX register. */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, uSrc);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();

                IEM_MC_FETCH_MREG_U64(uSrc, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffSrc, uSrc);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            default: /* REPNZ/REPZ prefixed forms are undefined. */
                return IEMOP_RAISE_INVALID_OPCODE();
        }
    }
    /* The register, register encoding is invalid. */
    else
        return IEMOP_RAISE_INVALID_OPCODE();
    return VINF_SUCCESS;
}
#else
FNIEMOP_STUB(iemOp_movntq_Mq_Pq__movntdq_Mdq_Vdq);
#endif
6981
6982
/*
 * Stubs for the not-yet-implemented packed saturating add/sub and min/max
 * instructions in the 0x0f 0xe8..0xee range.
 */
/** Opcode 0x0f 0xe8. */
FNIEMOP_STUB(iemOp_psubsb_Pq_Qq__psubsb_Vdq_Wdq);
/** Opcode 0x0f 0xe9. */
FNIEMOP_STUB(iemOp_psubsw_Pq_Qq__psubsw_Vdq_Wdq);
/** Opcode 0x0f 0xea. */
FNIEMOP_STUB(iemOp_pminsw_Pq_Qq__pminsw_Vdq_Wdq);
/** Opcode 0x0f 0xeb. */
FNIEMOP_STUB(iemOp_por_Pq_Qq__por_Vdq_Wdq);
/** Opcode 0x0f 0xec. */
FNIEMOP_STUB(iemOp_paddsb_Pq_Qq__paddsb_Vdq_Wdq);
/** Opcode 0x0f 0xed. */
FNIEMOP_STUB(iemOp_paddsw_Pq_Qq__paddsw_Vdq_Wdq);
/** Opcode 0x0f 0xee. */
FNIEMOP_STUB(iemOp_pmaxsw_Pq_Qq__pmaxsw_Vdq_Wdq);
6997
6998
/** Opcode 0x0f 0xef. */
/**
 * PXOR Pq,Qq (MMX) / PXOR Vdq,Wdq (SSE2) - dispatches to the common
 * full-operand MMX/SSE2 binary-op decoder with the pxor worker table.
 */
FNIEMOP_DEF(iemOp_pxor_Pq_Qq__pxor_Vdq_Wdq)
{
    IEMOP_MNEMONIC("pxor");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pxor);
}
7005
7006
/*
 * Stubs for the not-yet-implemented instructions in the 0x0f 0xf0..0xfe
 * range (shifts, multiplies, packed add/sub, maskmov, lddqu).
 * NOTE(review): several identifiers carry typos ('pslw', 'pslq', 'psbuq');
 * they are referenced by g_apfnTwoByteMap below, so any rename must touch
 * both places.
 */
/** Opcode 0x0f 0xf0. */
FNIEMOP_STUB(iemOp_lddqu_Vdq_Mdq);
/** Opcode 0x0f 0xf1. */
FNIEMOP_STUB(iemOp_psllw_Pq_Qq__pslw_Vdq_Wdq);
/** Opcode 0x0f 0xf2. */
FNIEMOP_STUB(iemOp_psld_Pq_Qq__pslld_Vdq_Wdq);
/** Opcode 0x0f 0xf3. */
FNIEMOP_STUB(iemOp_psllq_Pq_Qq__pslq_Vdq_Wdq);
/** Opcode 0x0f 0xf4. */
FNIEMOP_STUB(iemOp_pmuludq_Pq_Qq__pmuludq_Vdq_Wdq);
/** Opcode 0x0f 0xf5. */
FNIEMOP_STUB(iemOp_pmaddwd_Pq_Qq__pmaddwd_Vdq_Wdq);
/** Opcode 0x0f 0xf6. */
FNIEMOP_STUB(iemOp_psadbw_Pq_Qq__psadbw_Vdq_Wdq);
/** Opcode 0x0f 0xf7. */
FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq__maskmovdqu_Vdq_Udq);
/** Opcode 0x0f 0xf8. */
FNIEMOP_STUB(iemOp_psubb_Pq_Qq_psubb_Vdq_Wdq); //NEXT
/** Opcode 0x0f 0xf9. */
FNIEMOP_STUB(iemOp_psubw_Pq_Qq__psubw_Vdq_Wdq);
/** Opcode 0x0f 0xfa. */
FNIEMOP_STUB(iemOp_psubd_Pq_Qq__psubd_Vdq_Wdq);
/** Opcode 0x0f 0xfb. */
FNIEMOP_STUB(iemOp_psubq_Pq_Qq__psbuq_Vdq_Wdq);
/** Opcode 0x0f 0xfc. */
FNIEMOP_STUB(iemOp_paddb_Pq_Qq__paddb_Vdq_Wdq);
/** Opcode 0x0f 0xfd. */
FNIEMOP_STUB(iemOp_paddw_Pq_Qq__paddw_Vdq_Wdq);
/** Opcode 0x0f 0xfe. */
FNIEMOP_STUB(iemOp_paddd_Pq_Qq__paddd_Vdq_Wdq);
7037
7038
/**
 * The two-byte (0x0f prefixed) opcode dispatch table, indexed by the second
 * opcode byte.  Entry order must match the Intel/AMD two-byte opcode map
 * exactly; the hex comments are documentation only.
 * (Fixed: the 0xbb entry was previously mislabeled as 0xbd.)
 */
const PFNIEMOP g_apfnTwoByteMap[256] =
{
    /* 0x00 */ iemOp_Grp6,
    /* 0x01 */ iemOp_Grp7,
    /* 0x02 */ iemOp_lar_Gv_Ew,
    /* 0x03 */ iemOp_lsl_Gv_Ew,
    /* 0x04 */ iemOp_Invalid,
    /* 0x05 */ iemOp_syscall,
    /* 0x06 */ iemOp_clts,
    /* 0x07 */ iemOp_sysret,
    /* 0x08 */ iemOp_invd,
    /* 0x09 */ iemOp_wbinvd,
    /* 0x0a */ iemOp_Invalid,
    /* 0x0b */ iemOp_ud2,
    /* 0x0c */ iemOp_Invalid,
    /* 0x0d */ iemOp_nop_Ev_GrpP,
    /* 0x0e */ iemOp_femms,
    /* 0x0f */ iemOp_3Dnow,
    /* 0x10 */ iemOp_movups_Vps_Wps__movupd_Vpd_Wpd__movss_Vss_Wss__movsd_Vsd_Wsd,
    /* 0x11 */ iemOp_movups_Wps_Vps__movupd_Wpd_Vpd__movss_Wss_Vss__movsd_Vsd_Wsd,
    /* 0x12 */ iemOp_movlps_Vq_Mq__movhlps_Vq_Uq__movlpd_Vq_Mq__movsldup_Vq_Wq__movddup_Vq_Wq,
    /* 0x13 */ iemOp_movlps_Mq_Vq__movlpd_Mq_Vq,
    /* 0x14 */ iemOp_unpckhlps_Vps_Wq__unpcklpd_Vpd_Wq,
    /* 0x15 */ iemOp_unpckhps_Vps_Wq__unpckhpd_Vpd_Wq,
    /* 0x16 */ iemOp_movhps_Vq_Mq__movlhps_Vq_Uq__movhpd_Vq_Mq__movshdup_Vq_Wq,
    /* 0x17 */ iemOp_movhps_Mq_Vq__movhpd_Mq_Vq,
    /* 0x18 */ iemOp_prefetch_Grp16,
    /* 0x19 */ iemOp_nop_Ev,
    /* 0x1a */ iemOp_nop_Ev,
    /* 0x1b */ iemOp_nop_Ev,
    /* 0x1c */ iemOp_nop_Ev,
    /* 0x1d */ iemOp_nop_Ev,
    /* 0x1e */ iemOp_nop_Ev,
    /* 0x1f */ iemOp_nop_Ev,
    /* 0x20 */ iemOp_mov_Rd_Cd,
    /* 0x21 */ iemOp_mov_Rd_Dd,
    /* 0x22 */ iemOp_mov_Cd_Rd,
    /* 0x23 */ iemOp_mov_Dd_Rd,
    /* 0x24 */ iemOp_mov_Rd_Td,
    /* 0x25 */ iemOp_Invalid,
    /* 0x26 */ iemOp_mov_Td_Rd,
    /* 0x27 */ iemOp_Invalid,
    /* 0x28 */ iemOp_movaps_Vps_Wps__movapd_Vpd_Wpd,
    /* 0x29 */ iemOp_movaps_Wps_Vps__movapd_Wpd_Vpd,
    /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi__cvtpi2pd_Vpd_Qpi__cvtsi2ss_Vss_Ey__cvtsi2sd_Vsd_Ey,
    /* 0x2b */ iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd,
    /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps__cvttpd2pi_Ppi_Wpd__cvttss2si_Gy_Wss__cvttsd2si_Yu_Wsd,
    /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps__cvtpd2pi_QpiWpd__cvtss2si_Gy_Wss__cvtsd2si_Gy_Wsd,
    /* 0x2e */ iemOp_ucomiss_Vss_Wss__ucomisd_Vsd_Wsd,
    /* 0x2f */ iemOp_comiss_Vss_Wss__comisd_Vsd_Wsd,
    /* 0x30 */ iemOp_wrmsr,
    /* 0x31 */ iemOp_rdtsc,
    /* 0x32 */ iemOp_rdmsr,
    /* 0x33 */ iemOp_rdpmc,
    /* 0x34 */ iemOp_sysenter,
    /* 0x35 */ iemOp_sysexit,
    /* 0x36 */ iemOp_Invalid,
    /* 0x37 */ iemOp_getsec,
    /* 0x38 */ iemOp_3byte_Esc_A4,
    /* 0x39 */ iemOp_Invalid,
    /* 0x3a */ iemOp_3byte_Esc_A5,
    /* 0x3b */ iemOp_Invalid,
    /* 0x3c */ iemOp_Invalid,
    /* 0x3d */ iemOp_Invalid,
    /* 0x3e */ iemOp_Invalid,
    /* 0x3f */ iemOp_Invalid,
    /* 0x40 */ iemOp_cmovo_Gv_Ev,
    /* 0x41 */ iemOp_cmovno_Gv_Ev,
    /* 0x42 */ iemOp_cmovc_Gv_Ev,
    /* 0x43 */ iemOp_cmovnc_Gv_Ev,
    /* 0x44 */ iemOp_cmove_Gv_Ev,
    /* 0x45 */ iemOp_cmovne_Gv_Ev,
    /* 0x46 */ iemOp_cmovbe_Gv_Ev,
    /* 0x47 */ iemOp_cmovnbe_Gv_Ev,
    /* 0x48 */ iemOp_cmovs_Gv_Ev,
    /* 0x49 */ iemOp_cmovns_Gv_Ev,
    /* 0x4a */ iemOp_cmovp_Gv_Ev,
    /* 0x4b */ iemOp_cmovnp_Gv_Ev,
    /* 0x4c */ iemOp_cmovl_Gv_Ev,
    /* 0x4d */ iemOp_cmovnl_Gv_Ev,
    /* 0x4e */ iemOp_cmovle_Gv_Ev,
    /* 0x4f */ iemOp_cmovnle_Gv_Ev,
    /* 0x50 */ iemOp_movmskps_Gy_Ups__movmskpd_Gy_Upd,
    /* 0x51 */ iemOp_sqrtps_Wps_Vps__sqrtpd_Wpd_Vpd__sqrtss_Vss_Wss__sqrtsd_Vsd_Wsd,
    /* 0x52 */ iemOp_rsqrtps_Wps_Vps__rsqrtss_Vss_Wss,
    /* 0x53 */ iemOp_rcpps_Wps_Vps__rcpss_Vs_Wss,
    /* 0x54 */ iemOp_andps_Vps_Wps__andpd_Wpd_Vpd,
    /* 0x55 */ iemOp_andnps_Vps_Wps__andnpd_Wpd_Vpd,
    /* 0x56 */ iemOp_orps_Wpd_Vpd__orpd_Wpd_Vpd,
    /* 0x57 */ iemOp_xorps_Vps_Wps__xorpd_Wpd_Vpd,
    /* 0x58 */ iemOp_addps_Vps_Wps__addpd_Vpd_Wpd__addss_Vss_Wss__addsd_Vsd_Wsd,
    /* 0x59 */ iemOp_mulps_Vps_Wps__mulpd_Vpd_Wpd__mulss_Vss__Wss__mulsd_Vsd_Wsd,
    /* 0x5a */ iemOp_cvtps2pd_Vpd_Wps__cvtpd2ps_Vps_Wpd__cvtss2sd_Vsd_Wss__cvtsd2ss_Vss_Wsd,
    /* 0x5b */ iemOp_cvtdq2ps_Vps_Wdq__cvtps2dq_Vdq_Wps__cvtps2dq_Vdq_Wps,
    /* 0x5c */ iemOp_subps_Vps_Wps__subpd_Vps_Wdp__subss_Vss_Wss__subsd_Vsd_Wsd,
    /* 0x5d */ iemOp_minps_Vps_Wps__minpd_Vpd_Wpd__minss_Vss_Wss__minsd_Vsd_Wsd,
    /* 0x5e */ iemOp_divps_Vps_Wps__divpd_Vpd_Wpd__divss_Vss_Wss__divsd_Vsd_Wsd,
    /* 0x5f */ iemOp_maxps_Vps_Wps__maxpd_Vpd_Wpd__maxss_Vss_Wss__maxsd_Vsd_Wsd,
    /* 0x60 */ iemOp_punpcklbw_Pq_Qd__punpcklbw_Vdq_Wdq,
    /* 0x61 */ iemOp_punpcklwd_Pq_Qd__punpcklwd_Vdq_Wdq,
    /* 0x62 */ iemOp_punpckldq_Pq_Qd__punpckldq_Vdq_Wdq,
    /* 0x63 */ iemOp_packsswb_Pq_Qq__packsswb_Vdq_Wdq,
    /* 0x64 */ iemOp_pcmpgtb_Pq_Qq__pcmpgtb_Vdq_Wdq,
    /* 0x65 */ iemOp_pcmpgtw_Pq_Qq__pcmpgtw_Vdq_Wdq,
    /* 0x66 */ iemOp_pcmpgtd_Pq_Qq__pcmpgtd_Vdq_Wdq,
    /* 0x67 */ iemOp_packuswb_Pq_Qq__packuswb_Vdq_Wdq,
    /* 0x68 */ iemOp_punpckhbw_Pq_Qq__punpckhbw_Vdq_Wdq,
    /* 0x69 */ iemOp_punpckhwd_Pq_Qd__punpckhwd_Vdq_Wdq,
    /* 0x6a */ iemOp_punpckhdq_Pq_Qd__punpckhdq_Vdq_Wdq,
    /* 0x6b */ iemOp_packssdw_Pq_Qd__packssdq_Vdq_Wdq,
    /* 0x6c */ iemOp_punpcklqdq_Vdq_Wdq,
    /* 0x6d */ iemOp_punpckhqdq_Vdq_Wdq,
    /* 0x6e */ iemOp_movd_q_Pd_Ey__movd_q_Vy_Ey,
    /* 0x6f */ iemOp_movq_Pq_Qq__movdqa_Vdq_Wdq__movdqu_Vdq_Wdq,
    /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib__pshufd_Vdq_Wdq_Ib__pshufhw_Vdq_Wdq_Ib__pshuflq_Vdq_Wdq_Ib,
    /* 0x71 */ iemOp_Grp12,
    /* 0x72 */ iemOp_Grp13,
    /* 0x73 */ iemOp_Grp14,
    /* 0x74 */ iemOp_pcmpeqb_Pq_Qq__pcmpeqb_Vdq_Wdq,
    /* 0x75 */ iemOp_pcmpeqw_Pq_Qq__pcmpeqw_Vdq_Wdq,
    /* 0x76 */ iemOp_pcmped_Pq_Qq__pcmpeqd_Vdq_Wdq,
    /* 0x77 */ iemOp_emms,
    /* 0x78 */ iemOp_vmread_AmdGrp17,
    /* 0x79 */ iemOp_vmwrite,
    /* 0x7a */ iemOp_Invalid,
    /* 0x7b */ iemOp_Invalid,
    /* 0x7c */ iemOp_haddpd_Vdp_Wpd__haddps_Vps_Wps,
    /* 0x7d */ iemOp_hsubpd_Vpd_Wpd__hsubps_Vps_Wps,
    /* 0x7e */ iemOp_movd_q_Ey_Pd__movd_q_Ey_Vy__movq_Vq_Wq,
    /* 0x7f */ iemOp_movq_Qq_Pq__movq_movdqa_Wdq_Vdq__movdqu_Wdq_Vdq,
    /* 0x80 */ iemOp_jo_Jv,
    /* 0x81 */ iemOp_jno_Jv,
    /* 0x82 */ iemOp_jc_Jv,
    /* 0x83 */ iemOp_jnc_Jv,
    /* 0x84 */ iemOp_je_Jv,
    /* 0x85 */ iemOp_jne_Jv,
    /* 0x86 */ iemOp_jbe_Jv,
    /* 0x87 */ iemOp_jnbe_Jv,
    /* 0x88 */ iemOp_js_Jv,
    /* 0x89 */ iemOp_jns_Jv,
    /* 0x8a */ iemOp_jp_Jv,
    /* 0x8b */ iemOp_jnp_Jv,
    /* 0x8c */ iemOp_jl_Jv,
    /* 0x8d */ iemOp_jnl_Jv,
    /* 0x8e */ iemOp_jle_Jv,
    /* 0x8f */ iemOp_jnle_Jv,
    /* 0x90 */ iemOp_seto_Eb,
    /* 0x91 */ iemOp_setno_Eb,
    /* 0x92 */ iemOp_setc_Eb,
    /* 0x93 */ iemOp_setnc_Eb,
    /* 0x94 */ iemOp_sete_Eb,
    /* 0x95 */ iemOp_setne_Eb,
    /* 0x96 */ iemOp_setbe_Eb,
    /* 0x97 */ iemOp_setnbe_Eb,
    /* 0x98 */ iemOp_sets_Eb,
    /* 0x99 */ iemOp_setns_Eb,
    /* 0x9a */ iemOp_setp_Eb,
    /* 0x9b */ iemOp_setnp_Eb,
    /* 0x9c */ iemOp_setl_Eb,
    /* 0x9d */ iemOp_setnl_Eb,
    /* 0x9e */ iemOp_setle_Eb,
    /* 0x9f */ iemOp_setnle_Eb,
    /* 0xa0 */ iemOp_push_fs,
    /* 0xa1 */ iemOp_pop_fs,
    /* 0xa2 */ iemOp_cpuid,
    /* 0xa3 */ iemOp_bt_Ev_Gv,
    /* 0xa4 */ iemOp_shld_Ev_Gv_Ib,
    /* 0xa5 */ iemOp_shld_Ev_Gv_CL,
    /* 0xa6 */ iemOp_Invalid,
    /* 0xa7 */ iemOp_Invalid,
    /* 0xa8 */ iemOp_push_gs,
    /* 0xa9 */ iemOp_pop_gs,
    /* 0xaa */ iemOp_rsm,
    /* 0xab */ iemOp_bts_Ev_Gv,
    /* 0xac */ iemOp_shrd_Ev_Gv_Ib,
    /* 0xad */ iemOp_shrd_Ev_Gv_CL,
    /* 0xae */ iemOp_Grp15,
    /* 0xaf */ iemOp_imul_Gv_Ev,
    /* 0xb0 */ iemOp_cmpxchg_Eb_Gb,
    /* 0xb1 */ iemOp_cmpxchg_Ev_Gv,
    /* 0xb2 */ iemOp_lss_Gv_Mp,
    /* 0xb3 */ iemOp_btr_Ev_Gv,
    /* 0xb4 */ iemOp_lfs_Gv_Mp,
    /* 0xb5 */ iemOp_lgs_Gv_Mp,
    /* 0xb6 */ iemOp_movzx_Gv_Eb,
    /* 0xb7 */ iemOp_movzx_Gv_Ew,
    /* 0xb8 */ iemOp_popcnt_Gv_Ev_jmpe,
    /* 0xb9 */ iemOp_Grp10,
    /* 0xba */ iemOp_Grp8,
    /* 0xbb */ iemOp_btc_Ev_Gv,
    /* 0xbc */ iemOp_bsf_Gv_Ev,
    /* 0xbd */ iemOp_bsr_Gv_Ev,
    /* 0xbe */ iemOp_movsx_Gv_Eb,
    /* 0xbf */ iemOp_movsx_Gv_Ew,
    /* 0xc0 */ iemOp_xadd_Eb_Gb,
    /* 0xc1 */ iemOp_xadd_Ev_Gv,
    /* 0xc2 */ iemOp_cmpps_Vps_Wps_Ib__cmppd_Vpd_Wpd_Ib__cmpss_Vss_Wss_Ib__cmpsd_Vsd_Wsd_Ib,
    /* 0xc3 */ iemOp_movnti_My_Gy,
    /* 0xc4 */ iemOp_pinsrw_Pq_Ry_Mw_Ib__pinsrw_Vdq_Ry_Mw_Ib,
    /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib__pextrw_Gd_Udq_Ib,
    /* 0xc6 */ iemOp_shufps_Vps_Wps_Ib__shufdp_Vpd_Wpd_Ib,
    /* 0xc7 */ iemOp_Grp9,
    /* 0xc8 */ iemOp_bswap_rAX_r8,
    /* 0xc9 */ iemOp_bswap_rCX_r9,
    /* 0xca */ iemOp_bswap_rDX_r10,
    /* 0xcb */ iemOp_bswap_rBX_r11,
    /* 0xcc */ iemOp_bswap_rSP_r12,
    /* 0xcd */ iemOp_bswap_rBP_r13,
    /* 0xce */ iemOp_bswap_rSI_r14,
    /* 0xcf */ iemOp_bswap_rDI_r15,
    /* 0xd0 */ iemOp_addsubpd_Vpd_Wpd__addsubps_Vps_Wps,
    /* 0xd1 */ iemOp_psrlw_Pp_Qp__psrlw_Vdp_Wdq,
    /* 0xd2 */ iemOp_psrld_Pq_Qq__psrld_Vdq_Wdq,
    /* 0xd3 */ iemOp_psrlq_Pq_Qq__psrlq_Vdq_Wdq,
    /* 0xd4 */ iemOp_paddq_Pq_Qq__paddq_Vdq_Wdq,
    /* 0xd5 */ iemOp_pmulq_Pq_Qq__pmullw_Vdq_Wdq,
    /* 0xd6 */ iemOp_movq_Wq_Vq__movq2dq_Vdq_Nq__movdq2q_Pq_Uq,
    /* 0xd7 */ iemOp_pmovmskb_Gd_Nq__pmovmskb_Gd_Udq,
    /* 0xd8 */ iemOp_psubusb_Pq_Qq__psubusb_Vdq_Wdq,
    /* 0xd9 */ iemOp_psubusw_Pq_Qq__psubusw_Vdq_Wdq,
    /* 0xda */ iemOp_pminub_Pq_Qq__pminub_Vdq_Wdq,
    /* 0xdb */ iemOp_pand_Pq_Qq__pand_Vdq_Wdq,
    /* 0xdc */ iemOp_paddusb_Pq_Qq__paddusb_Vdq_Wdq,
    /* 0xdd */ iemOp_paddusw_Pq_Qq__paddusw_Vdq_Wdq,
    /* 0xde */ iemOp_pmaxub_Pq_Qq__pamxub_Vdq_Wdq,
    /* 0xdf */ iemOp_pandn_Pq_Qq__pandn_Vdq_Wdq,
    /* 0xe0 */ iemOp_pavgb_Pq_Qq__pavgb_Vdq_Wdq,
    /* 0xe1 */ iemOp_psraw_Pq_Qq__psraw_Vdq_Wdq,
    /* 0xe2 */ iemOp_psrad_Pq_Qq__psrad_Vdq_Wdq,
    /* 0xe3 */ iemOp_pavgw_Pq_Qq__pavgw_Vdq_Wdq,
    /* 0xe4 */ iemOp_pmulhuw_Pq_Qq__pmulhuw_Vdq_Wdq,
    /* 0xe5 */ iemOp_pmulhw_Pq_Qq__pmulhw_Vdq_Wdq,
    /* 0xe6 */ iemOp_cvttpd2dq_Vdq_Wdp__cvtdq2pd_Vdq_Wpd__cvtpd2dq_Vdq_Wpd,
    /* 0xe7 */ iemOp_movntq_Mq_Pq__movntdq_Mdq_Vdq,
    /* 0xe8 */ iemOp_psubsb_Pq_Qq__psubsb_Vdq_Wdq,
    /* 0xe9 */ iemOp_psubsw_Pq_Qq__psubsw_Vdq_Wdq,
    /* 0xea */ iemOp_pminsw_Pq_Qq__pminsw_Vdq_Wdq,
    /* 0xeb */ iemOp_por_Pq_Qq__por_Vdq_Wdq,
    /* 0xec */ iemOp_paddsb_Pq_Qq__paddsb_Vdq_Wdq,
    /* 0xed */ iemOp_paddsw_Pq_Qq__paddsw_Vdq_Wdq,
    /* 0xee */ iemOp_pmaxsw_Pq_Qq__pmaxsw_Vdq_Wdq,
    /* 0xef */ iemOp_pxor_Pq_Qq__pxor_Vdq_Wdq,
    /* 0xf0 */ iemOp_lddqu_Vdq_Mdq,
    /* 0xf1 */ iemOp_psllw_Pq_Qq__pslw_Vdq_Wdq,
    /* 0xf2 */ iemOp_psld_Pq_Qq__pslld_Vdq_Wdq,
    /* 0xf3 */ iemOp_psllq_Pq_Qq__pslq_Vdq_Wdq,
    /* 0xf4 */ iemOp_pmuludq_Pq_Qq__pmuludq_Vdq_Wdq,
    /* 0xf5 */ iemOp_pmaddwd_Pq_Qq__pmaddwd_Vdq_Wdq,
    /* 0xf6 */ iemOp_psadbw_Pq_Qq__psadbw_Vdq_Wdq,
    /* 0xf7 */ iemOp_maskmovq_Pq_Nq__maskmovdqu_Vdq_Udq,
    /* 0xf8 */ iemOp_psubb_Pq_Qq_psubb_Vdq_Wdq,
    /* 0xf9 */ iemOp_psubw_Pq_Qq__psubw_Vdq_Wdq,
    /* 0xfa */ iemOp_psubd_Pq_Qq__psubd_Vdq_Wdq,
    /* 0xfb */ iemOp_psubq_Pq_Qq__psbuq_Vdq_Wdq,
    /* 0xfc */ iemOp_paddb_Pq_Qq__paddb_Vdq_Wdq,
    /* 0xfd */ iemOp_paddw_Pq_Qq__paddw_Vdq_Wdq,
    /* 0xfe */ iemOp_paddd_Pq_Qq__paddd_Vdq_Wdq,
    /* 0xff */ iemOp_Invalid
};
7298
7299/** @} */
7300
7301
7302/** @name One byte opcodes.
7303 *
7304 * @{
7305 */
7306
/*
 * Opcodes 0x00..0x0e: the ADD and OR instruction families plus the ES/CS
 * segment push/pops.  Each decoder is a thin wrapper dispatching to the
 * common binary-operator helpers at the top of this file with the matching
 * g_iemAImpl_* worker table.
 */

/** Opcode 0x00. */
FNIEMOP_DEF(iemOp_add_Eb_Gb)
{
    IEMOP_MNEMONIC("add Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_add);
}


/** Opcode 0x01. */
FNIEMOP_DEF(iemOp_add_Ev_Gv)
{
    IEMOP_MNEMONIC("add Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_add);
}


/** Opcode 0x02. */
FNIEMOP_DEF(iemOp_add_Gb_Eb)
{
    IEMOP_MNEMONIC("add Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_add);
}


/** Opcode 0x03. */
FNIEMOP_DEF(iemOp_add_Gv_Ev)
{
    IEMOP_MNEMONIC("add Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_add);
}


/** Opcode 0x04. */
FNIEMOP_DEF(iemOp_add_Al_Ib)
{
    IEMOP_MNEMONIC("add al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_add);
}


/** Opcode 0x05. */
FNIEMOP_DEF(iemOp_add_eAX_Iz)
{
    IEMOP_MNEMONIC("add rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_add);
}


/** Opcode 0x06. */
FNIEMOP_DEF(iemOp_push_ES)
{
    IEMOP_MNEMONIC("push es");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_ES);
}


/** Opcode 0x07. */
FNIEMOP_DEF(iemOp_pop_ES)
{
    IEMOP_MNEMONIC("pop es");
    IEMOP_HLP_NO_64BIT(); /* 'pop es' is invalid in 64-bit mode. */
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_ES, pIemCpu->enmEffOpSize);
}


/** Opcode 0x08. */
FNIEMOP_DEF(iemOp_or_Eb_Gb)
{
    IEMOP_MNEMONIC("or  Eb,Gb");
    /* AF is undefined after logical ops; tell the verifier not to compare it. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_or);
}


/** Opcode 0x09. */
FNIEMOP_DEF(iemOp_or_Ev_Gv)
{
    /* NOTE(review): stray trailing space in the mnemonic string below. */
    IEMOP_MNEMONIC("or  Ev,Gv ");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_or);
}


/** Opcode 0x0a. */
FNIEMOP_DEF(iemOp_or_Gb_Eb)
{
    IEMOP_MNEMONIC("or  Gb,Eb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_or);
}


/** Opcode 0x0b. */
FNIEMOP_DEF(iemOp_or_Gv_Ev)
{
    IEMOP_MNEMONIC("or  Gv,Ev");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_or);
}


/** Opcode 0x0c. */
FNIEMOP_DEF(iemOp_or_Al_Ib)
{
    IEMOP_MNEMONIC("or  al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_or);
}


/** Opcode 0x0d. */
FNIEMOP_DEF(iemOp_or_eAX_Iz)
{
    IEMOP_MNEMONIC("or  rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_or);
}


/** Opcode 0x0e. */
FNIEMOP_DEF(iemOp_push_CS)
{
    IEMOP_MNEMONIC("push cs");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_CS);
}
7433
7434
/** Opcode 0x0f. */
/**
 * Two-byte escape: fetches the second opcode byte and dispatches through
 * g_apfnTwoByteMap.  Requires a 286 or later.
 */
FNIEMOP_DEF(iemOp_2byteEscape)
{
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    /** @todo PUSH CS on 8086, undefined on 80186. */
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL(g_apfnTwoByteMap[b]);
}
7443
/*
 * Opcodes 0x10..0x1f: the ADC and SBB instruction families plus the SS/DS
 * segment push/pops.  Same thin-wrapper pattern as the ADD/OR block above.
 */

/** Opcode 0x10. */
FNIEMOP_DEF(iemOp_adc_Eb_Gb)
{
    IEMOP_MNEMONIC("adc Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_adc);
}


/** Opcode 0x11. */
FNIEMOP_DEF(iemOp_adc_Ev_Gv)
{
    IEMOP_MNEMONIC("adc Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_adc);
}


/** Opcode 0x12. */
FNIEMOP_DEF(iemOp_adc_Gb_Eb)
{
    IEMOP_MNEMONIC("adc Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_adc);
}


/** Opcode 0x13. */
FNIEMOP_DEF(iemOp_adc_Gv_Ev)
{
    IEMOP_MNEMONIC("adc Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_adc);
}


/** Opcode 0x14. */
FNIEMOP_DEF(iemOp_adc_Al_Ib)
{
    IEMOP_MNEMONIC("adc al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_adc);
}


/** Opcode 0x15. */
FNIEMOP_DEF(iemOp_adc_eAX_Iz)
{
    IEMOP_MNEMONIC("adc rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_adc);
}


/** Opcode 0x16. */
FNIEMOP_DEF(iemOp_push_SS)
{
    IEMOP_MNEMONIC("push ss");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_SS);
}


/** Opcode 0x17. */
FNIEMOP_DEF(iemOp_pop_SS)
{
    IEMOP_MNEMONIC("pop ss"); /** @todo implies instruction fusing? */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT(); /* 'pop ss' is invalid in 64-bit mode. */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_SS, pIemCpu->enmEffOpSize);
}


/** Opcode 0x18. */
FNIEMOP_DEF(iemOp_sbb_Eb_Gb)
{
    IEMOP_MNEMONIC("sbb Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sbb);
}


/** Opcode 0x19. */
FNIEMOP_DEF(iemOp_sbb_Ev_Gv)
{
    IEMOP_MNEMONIC("sbb Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sbb);
}


/** Opcode 0x1a. */
FNIEMOP_DEF(iemOp_sbb_Gb_Eb)
{
    IEMOP_MNEMONIC("sbb Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sbb);
}


/** Opcode 0x1b. */
FNIEMOP_DEF(iemOp_sbb_Gv_Ev)
{
    IEMOP_MNEMONIC("sbb Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sbb);
}


/** Opcode 0x1c. */
FNIEMOP_DEF(iemOp_sbb_Al_Ib)
{
    IEMOP_MNEMONIC("sbb al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sbb);
}


/** Opcode 0x1d. */
FNIEMOP_DEF(iemOp_sbb_eAX_Iz)
{
    IEMOP_MNEMONIC("sbb rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sbb);
}


/** Opcode 0x1e. */
FNIEMOP_DEF(iemOp_push_DS)
{
    IEMOP_MNEMONIC("push ds");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_DS);
}


/** Opcode 0x1f. */
FNIEMOP_DEF(iemOp_pop_DS)
{
    IEMOP_MNEMONIC("pop ds");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT(); /* 'pop ds' is invalid in 64-bit mode. */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_DS, pIemCpu->enmEffOpSize);
}
7574
7575
/*
 * Opcodes 0x20..0x2f: the AND and SUB instruction families, the ES/CS
 * segment-override prefixes, and the DAA/DAS BCD adjust instructions.
 */

/** Opcode 0x20. */
FNIEMOP_DEF(iemOp_and_Eb_Gb)
{
    IEMOP_MNEMONIC("and Eb,Gb");
    /* AF is undefined after logical ops; tell the verifier not to compare it. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_and);
}


/** Opcode 0x21. */
FNIEMOP_DEF(iemOp_and_Ev_Gv)
{
    IEMOP_MNEMONIC("and Ev,Gv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_and);
}


/** Opcode 0x22. */
FNIEMOP_DEF(iemOp_and_Gb_Eb)
{
    IEMOP_MNEMONIC("and Gb,Eb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_and);
}


/** Opcode 0x23. */
FNIEMOP_DEF(iemOp_and_Gv_Ev)
{
    IEMOP_MNEMONIC("and Gv,Ev");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_and);
}


/** Opcode 0x24. */
FNIEMOP_DEF(iemOp_and_Al_Ib)
{
    IEMOP_MNEMONIC("and al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_and);
}


/** Opcode 0x25. */
FNIEMOP_DEF(iemOp_and_eAX_Iz)
{
    IEMOP_MNEMONIC("and rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_and);
}


/** Opcode 0x26. */
/** ES segment-override prefix: records the override and re-dispatches the
 *  next opcode byte through the one-byte map. */
FNIEMOP_DEF(iemOp_seg_ES)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg es");
    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_ES;
    pIemCpu->iEffSeg = X86_SREG_ES;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}


/** Opcode 0x27. */
FNIEMOP_DEF(iemOp_daa)
{
    IEMOP_MNEMONIC("daa AL");
    IEMOP_HLP_NO_64BIT(); /* DAA is invalid in 64-bit mode. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_daa);
}


/** Opcode 0x28. */
FNIEMOP_DEF(iemOp_sub_Eb_Gb)
{
    IEMOP_MNEMONIC("sub Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sub);
}


/** Opcode 0x29. */
FNIEMOP_DEF(iemOp_sub_Ev_Gv)
{
    IEMOP_MNEMONIC("sub Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sub);
}


/** Opcode 0x2a. */
FNIEMOP_DEF(iemOp_sub_Gb_Eb)
{
    IEMOP_MNEMONIC("sub Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sub);
}


/** Opcode 0x2b. */
FNIEMOP_DEF(iemOp_sub_Gv_Ev)
{
    IEMOP_MNEMONIC("sub Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sub);
}


/** Opcode 0x2c. */
FNIEMOP_DEF(iemOp_sub_Al_Ib)
{
    IEMOP_MNEMONIC("sub al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sub);
}


/** Opcode 0x2d. */
FNIEMOP_DEF(iemOp_sub_eAX_Iz)
{
    IEMOP_MNEMONIC("sub rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sub);
}


/** Opcode 0x2e. */
/** CS segment-override prefix. */
FNIEMOP_DEF(iemOp_seg_CS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg cs");
    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_CS;
    pIemCpu->iEffSeg = X86_SREG_CS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}


/** Opcode 0x2f. */
FNIEMOP_DEF(iemOp_das)
{
    IEMOP_MNEMONIC("das AL");
    IEMOP_HLP_NO_64BIT(); /* DAS is invalid in 64-bit mode. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_das);
}
7722
7723
/*
 * Opcodes 0x30..0x3f: the XOR and CMP instruction families, the SS/DS
 * segment-override prefixes, and the (stubbed) AAA/AAS ASCII adjusts.
 */

/** Opcode 0x30. */
FNIEMOP_DEF(iemOp_xor_Eb_Gb)
{
    IEMOP_MNEMONIC("xor Eb,Gb");
    /* AF is undefined after logical ops; tell the verifier not to compare it. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_xor);
}


/** Opcode 0x31. */
FNIEMOP_DEF(iemOp_xor_Ev_Gv)
{
    IEMOP_MNEMONIC("xor Ev,Gv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_xor);
}


/** Opcode 0x32. */
FNIEMOP_DEF(iemOp_xor_Gb_Eb)
{
    IEMOP_MNEMONIC("xor Gb,Eb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_xor);
}


/** Opcode 0x33. */
FNIEMOP_DEF(iemOp_xor_Gv_Ev)
{
    IEMOP_MNEMONIC("xor Gv,Ev");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_xor);
}


/** Opcode 0x34. */
FNIEMOP_DEF(iemOp_xor_Al_Ib)
{
    IEMOP_MNEMONIC("xor al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_xor);
}


/** Opcode 0x35. */
FNIEMOP_DEF(iemOp_xor_eAX_Iz)
{
    IEMOP_MNEMONIC("xor rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_xor);
}


/** Opcode 0x36. */
/** SS segment-override prefix. */
FNIEMOP_DEF(iemOp_seg_SS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ss");
    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_SS;
    pIemCpu->iEffSeg = X86_SREG_SS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}


/** Opcode 0x37. */
FNIEMOP_STUB(iemOp_aaa);


/** Opcode 0x38. */
FNIEMOP_DEF(iemOp_cmp_Eb_Gb)
{
    IEMOP_MNEMONIC("cmp Eb,Gb");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo do we have to decode the whole instruction first? */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_cmp);
}


/** Opcode 0x39. */
FNIEMOP_DEF(iemOp_cmp_Ev_Gv)
{
    IEMOP_MNEMONIC("cmp Ev,Gv");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo do we have to decode the whole instruction first? */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_cmp);
}


/** Opcode 0x3a. */
FNIEMOP_DEF(iemOp_cmp_Gb_Eb)
{
    IEMOP_MNEMONIC("cmp Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_cmp);
}


/** Opcode 0x3b. */
FNIEMOP_DEF(iemOp_cmp_Gv_Ev)
{
    IEMOP_MNEMONIC("cmp Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_cmp);
}


/** Opcode 0x3c. */
FNIEMOP_DEF(iemOp_cmp_Al_Ib)
{
    IEMOP_MNEMONIC("cmp al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_cmp);
}


/** Opcode 0x3d. */
FNIEMOP_DEF(iemOp_cmp_eAX_Iz)
{
    IEMOP_MNEMONIC("cmp rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_cmp);
}


/** Opcode 0x3e. */
/** DS segment-override prefix. */
FNIEMOP_DEF(iemOp_seg_DS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ds");
    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_DS;
    pIemCpu->iEffSeg = X86_SREG_DS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}


/** Opcode 0x3f. */
FNIEMOP_STUB(iemOp_aas);
7858
7859/**
7860 * Common 'inc/dec/not/neg register' helper.
7861 */
7862FNIEMOP_DEF_2(iemOpCommonUnaryGReg, PCIEMOPUNARYSIZES, pImpl, uint8_t, iReg)
7863{
7864 IEMOP_HLP_NO_LOCK_PREFIX();
7865 switch (pIemCpu->enmEffOpSize)
7866 {
7867 case IEMMODE_16BIT:
7868 IEM_MC_BEGIN(2, 0);
7869 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7870 IEM_MC_ARG(uint32_t *, pEFlags, 1);
7871 IEM_MC_REF_GREG_U16(pu16Dst, iReg);
7872 IEM_MC_REF_EFLAGS(pEFlags);
7873 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
7874 IEM_MC_ADVANCE_RIP();
7875 IEM_MC_END();
7876 return VINF_SUCCESS;
7877
7878 case IEMMODE_32BIT:
7879 IEM_MC_BEGIN(2, 0);
7880 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7881 IEM_MC_ARG(uint32_t *, pEFlags, 1);
7882 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
7883 IEM_MC_REF_EFLAGS(pEFlags);
7884 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
7885 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7886 IEM_MC_ADVANCE_RIP();
7887 IEM_MC_END();
7888 return VINF_SUCCESS;
7889
7890 case IEMMODE_64BIT:
7891 IEM_MC_BEGIN(2, 0);
7892 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7893 IEM_MC_ARG(uint32_t *, pEFlags, 1);
7894 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
7895 IEM_MC_REF_EFLAGS(pEFlags);
7896 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
7897 IEM_MC_ADVANCE_RIP();
7898 IEM_MC_END();
7899 return VINF_SUCCESS;
7900 }
7901 return VINF_SUCCESS;
7902}
7903
7904
/*
 * Opcodes 0x40..0x44: INC eAX..eSP in 16/32-bit mode; in 64-bit mode these
 * bytes are REX prefixes, so each decoder first checks the CPU mode,
 * records the REX bits and re-dispatches the next opcode byte.
 */

/** Opcode 0x40. */
FNIEMOP_DEF(iemOp_inc_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eAX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xAX);
}


/** Opcode 0x41. */
FNIEMOP_DEF(iemOp_inc_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode (REX.B).
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.b");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B;
        pIemCpu->uRexB     = 1 << 3; /* Extends the r/m / base register field. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eCX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xCX);
}


/** Opcode 0x42. */
FNIEMOP_DEF(iemOp_inc_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode (REX.X).
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.x");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X;
        pIemCpu->uRexIndex = 1 << 3; /* Extends the SIB index register field. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eDX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDX);
}



/** Opcode 0x43. */
FNIEMOP_DEF(iemOp_inc_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode (REX.B + REX.X).
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bx");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pIemCpu->uRexB     = 1 << 3;
        pIemCpu->uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eBX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBX);
}


/** Opcode 0x44. */
FNIEMOP_DEF(iemOp_inc_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode (REX.R).
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.r");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R;
        pIemCpu->uRexReg   = 1 << 3; /* Extends the ModR/M reg register field. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eSP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSP);
}
8009
8010
8011/** Opcode 0x45. */
8012FNIEMOP_DEF(iemOp_inc_eBP)
8013{
8014 /*
8015 * This is a REX prefix in 64-bit mode.
8016 */
8017 if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
8018 {
8019 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rb");
8020 pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B;
8021 pIemCpu->uRexReg = 1 << 3;
8022 pIemCpu->uRexB = 1 << 3;
8023
8024 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8025 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8026 }
8027
8028 IEMOP_MNEMONIC("inc eBP");
8029 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBP);
8030}
8031
8032
8033/** Opcode 0x46. */
8034FNIEMOP_DEF(iemOp_inc_eSI)
8035{
8036 /*
8037 * This is a REX prefix in 64-bit mode.
8038 */
8039 if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
8040 {
8041 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rx");
8042 pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X;
8043 pIemCpu->uRexReg = 1 << 3;
8044 pIemCpu->uRexIndex = 1 << 3;
8045
8046 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8047 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8048 }
8049
8050 IEMOP_MNEMONIC("inc eSI");
8051 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSI);
8052}
8053
8054
8055/** Opcode 0x47. */
8056FNIEMOP_DEF(iemOp_inc_eDI)
8057{
8058 /*
8059 * This is a REX prefix in 64-bit mode.
8060 */
8061 if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
8062 {
8063 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbx");
8064 pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
8065 pIemCpu->uRexReg = 1 << 3;
8066 pIemCpu->uRexB = 1 << 3;
8067 pIemCpu->uRexIndex = 1 << 3;
8068
8069 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8070 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8071 }
8072
8073 IEMOP_MNEMONIC("inc eDI");
8074 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDI);
8075}
8076
8077
8078/** Opcode 0x48. */
8079FNIEMOP_DEF(iemOp_dec_eAX)
8080{
8081 /*
8082 * This is a REX prefix in 64-bit mode.
8083 */
8084 if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
8085 {
8086 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.w");
8087 pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_SIZE_REX_W;
8088 iemRecalEffOpSize(pIemCpu);
8089
8090 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8091 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8092 }
8093
8094 IEMOP_MNEMONIC("dec eAX");
8095 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xAX);
8096}
8097
8098
8099/** Opcode 0x49. */
8100FNIEMOP_DEF(iemOp_dec_eCX)
8101{
8102 /*
8103 * This is a REX prefix in 64-bit mode.
8104 */
8105 if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
8106 {
8107 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bw");
8108 pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
8109 pIemCpu->uRexB = 1 << 3;
8110 iemRecalEffOpSize(pIemCpu);
8111
8112 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8113 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8114 }
8115
8116 IEMOP_MNEMONIC("dec eCX");
8117 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xCX);
8118}
8119
8120
8121/** Opcode 0x4a. */
8122FNIEMOP_DEF(iemOp_dec_eDX)
8123{
8124 /*
8125 * This is a REX prefix in 64-bit mode.
8126 */
8127 if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
8128 {
8129 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.xw");
8130 pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
8131 pIemCpu->uRexIndex = 1 << 3;
8132 iemRecalEffOpSize(pIemCpu);
8133
8134 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8135 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8136 }
8137
8138 IEMOP_MNEMONIC("dec eDX");
8139 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDX);
8140}
8141
8142
8143/** Opcode 0x4b. */
8144FNIEMOP_DEF(iemOp_dec_eBX)
8145{
8146 /*
8147 * This is a REX prefix in 64-bit mode.
8148 */
8149 if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
8150 {
8151 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bxw");
8152 pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
8153 pIemCpu->uRexB = 1 << 3;
8154 pIemCpu->uRexIndex = 1 << 3;
8155 iemRecalEffOpSize(pIemCpu);
8156
8157 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8158 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8159 }
8160
8161 IEMOP_MNEMONIC("dec eBX");
8162 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBX);
8163}
8164
8165
8166/** Opcode 0x4c. */
8167FNIEMOP_DEF(iemOp_dec_eSP)
8168{
8169 /*
8170 * This is a REX prefix in 64-bit mode.
8171 */
8172 if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
8173 {
8174 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rw");
8175 pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_SIZE_REX_W;
8176 pIemCpu->uRexReg = 1 << 3;
8177 iemRecalEffOpSize(pIemCpu);
8178
8179 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8180 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8181 }
8182
8183 IEMOP_MNEMONIC("dec eSP");
8184 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSP);
8185}
8186
8187
8188/** Opcode 0x4d. */
8189FNIEMOP_DEF(iemOp_dec_eBP)
8190{
8191 /*
8192 * This is a REX prefix in 64-bit mode.
8193 */
8194 if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
8195 {
8196 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbw");
8197 pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
8198 pIemCpu->uRexReg = 1 << 3;
8199 pIemCpu->uRexB = 1 << 3;
8200 iemRecalEffOpSize(pIemCpu);
8201
8202 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8203 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8204 }
8205
8206 IEMOP_MNEMONIC("dec eBP");
8207 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBP);
8208}
8209
8210
8211/** Opcode 0x4e. */
8212FNIEMOP_DEF(iemOp_dec_eSI)
8213{
8214 /*
8215 * This is a REX prefix in 64-bit mode.
8216 */
8217 if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
8218 {
8219 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rxw");
8220 pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
8221 pIemCpu->uRexReg = 1 << 3;
8222 pIemCpu->uRexIndex = 1 << 3;
8223 iemRecalEffOpSize(pIemCpu);
8224
8225 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8226 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8227 }
8228
8229 IEMOP_MNEMONIC("dec eSI");
8230 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSI);
8231}
8232
8233
8234/** Opcode 0x4f. */
8235FNIEMOP_DEF(iemOp_dec_eDI)
8236{
8237 /*
8238 * This is a REX prefix in 64-bit mode.
8239 */
8240 if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
8241 {
8242 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbxw");
8243 pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
8244 pIemCpu->uRexReg = 1 << 3;
8245 pIemCpu->uRexB = 1 << 3;
8246 pIemCpu->uRexIndex = 1 << 3;
8247 iemRecalEffOpSize(pIemCpu);
8248
8249 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8250 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8251 }
8252
8253 IEMOP_MNEMONIC("dec eDI");
8254 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDI);
8255}
8256
8257
8258/**
8259 * Common 'push register' helper.
8260 */
8261FNIEMOP_DEF_1(iemOpCommonPushGReg, uint8_t, iReg)
8262{
8263 IEMOP_HLP_NO_LOCK_PREFIX();
8264 if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
8265 {
8266 iReg |= pIemCpu->uRexB;
8267 pIemCpu->enmDefOpSize = IEMMODE_64BIT;
8268 pIemCpu->enmEffOpSize = !(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
8269 }
8270
8271 switch (pIemCpu->enmEffOpSize)
8272 {
8273 case IEMMODE_16BIT:
8274 IEM_MC_BEGIN(0, 1);
8275 IEM_MC_LOCAL(uint16_t, u16Value);
8276 IEM_MC_FETCH_GREG_U16(u16Value, iReg);
8277 IEM_MC_PUSH_U16(u16Value);
8278 IEM_MC_ADVANCE_RIP();
8279 IEM_MC_END();
8280 break;
8281
8282 case IEMMODE_32BIT:
8283 IEM_MC_BEGIN(0, 1);
8284 IEM_MC_LOCAL(uint32_t, u32Value);
8285 IEM_MC_FETCH_GREG_U32(u32Value, iReg);
8286 IEM_MC_PUSH_U32(u32Value);
8287 IEM_MC_ADVANCE_RIP();
8288 IEM_MC_END();
8289 break;
8290
8291 case IEMMODE_64BIT:
8292 IEM_MC_BEGIN(0, 1);
8293 IEM_MC_LOCAL(uint64_t, u64Value);
8294 IEM_MC_FETCH_GREG_U64(u64Value, iReg);
8295 IEM_MC_PUSH_U64(u64Value);
8296 IEM_MC_ADVANCE_RIP();
8297 IEM_MC_END();
8298 break;
8299 }
8300
8301 return VINF_SUCCESS;
8302}
8303
8304
/** Opcode 0x50. */
FNIEMOP_DEF(iemOp_push_eAX)
{
    IEMOP_MNEMONIC("push rAX");
    /* The common worker handles REX.B extension and operand size. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xAX);
}
8311
8312
/** Opcode 0x51. */
FNIEMOP_DEF(iemOp_push_eCX)
{
    IEMOP_MNEMONIC("push rCX");
    /* The common worker handles REX.B extension and operand size. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xCX);
}
8319
8320
/** Opcode 0x52. */
FNIEMOP_DEF(iemOp_push_eDX)
{
    IEMOP_MNEMONIC("push rDX");
    /* The common worker handles REX.B extension and operand size. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDX);
}
8327
8328
/** Opcode 0x53. */
FNIEMOP_DEF(iemOp_push_eBX)
{
    IEMOP_MNEMONIC("push rBX");
    /* The common worker handles REX.B extension and operand size. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBX);
}
8335
8336
/** Opcode 0x54. */
FNIEMOP_DEF(iemOp_push_eSP)
{
    IEMOP_MNEMONIC("push rSP");
    if (IEM_GET_TARGET_CPU(pIemCpu) == IEMTARGETCPU_8086)
    {
        /* The 8086/88 pushes the value of SP *after* the decrement, i.e.
           SP - 2; later CPUs push the pre-decrement value. */
        /* NOTE(review): control appears to fall through to the common push
           below after this IEM_MC block; verify that IEM_MC_BEGIN/END expand
           to an independent code path (or an early return) here so the push
           is not done twice for the 8086 target. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint16_t, u16Value);
        IEM_MC_FETCH_GREG_U16(u16Value, X86_GREG_xSP);
        IEM_MC_SUB_LOCAL_U16(u16Value, 2);
        IEM_MC_PUSH_U16(u16Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSP);
}
8353
8354
/** Opcode 0x55. */
FNIEMOP_DEF(iemOp_push_eBP)
{
    IEMOP_MNEMONIC("push rBP");
    /* The common worker handles REX.B extension and operand size. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBP);
}
8361
8362
/** Opcode 0x56. */
FNIEMOP_DEF(iemOp_push_eSI)
{
    IEMOP_MNEMONIC("push rSI");
    /* The common worker handles REX.B extension and operand size. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSI);
}
8369
8370
/** Opcode 0x57. */
FNIEMOP_DEF(iemOp_push_eDI)
{
    IEMOP_MNEMONIC("push rDI");
    /* The common worker handles REX.B extension and operand size. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDI);
}
8377
8378
8379/**
8380 * Common 'pop register' helper.
8381 */
8382FNIEMOP_DEF_1(iemOpCommonPopGReg, uint8_t, iReg)
8383{
8384 IEMOP_HLP_NO_LOCK_PREFIX();
8385 if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
8386 {
8387 iReg |= pIemCpu->uRexB;
8388 pIemCpu->enmDefOpSize = IEMMODE_64BIT;
8389 pIemCpu->enmEffOpSize = !(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
8390 }
8391
8392 switch (pIemCpu->enmEffOpSize)
8393 {
8394 case IEMMODE_16BIT:
8395 IEM_MC_BEGIN(0, 1);
8396 IEM_MC_LOCAL(uint16_t *, pu16Dst);
8397 IEM_MC_REF_GREG_U16(pu16Dst, iReg);
8398 IEM_MC_POP_U16(pu16Dst);
8399 IEM_MC_ADVANCE_RIP();
8400 IEM_MC_END();
8401 break;
8402
8403 case IEMMODE_32BIT:
8404 IEM_MC_BEGIN(0, 1);
8405 IEM_MC_LOCAL(uint32_t *, pu32Dst);
8406 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
8407 IEM_MC_POP_U32(pu32Dst);
8408 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /** @todo testcase*/
8409 IEM_MC_ADVANCE_RIP();
8410 IEM_MC_END();
8411 break;
8412
8413 case IEMMODE_64BIT:
8414 IEM_MC_BEGIN(0, 1);
8415 IEM_MC_LOCAL(uint64_t *, pu64Dst);
8416 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
8417 IEM_MC_POP_U64(pu64Dst);
8418 IEM_MC_ADVANCE_RIP();
8419 IEM_MC_END();
8420 break;
8421 }
8422
8423 return VINF_SUCCESS;
8424}
8425
8426
/** Opcode 0x58. */
FNIEMOP_DEF(iemOp_pop_eAX)
{
    IEMOP_MNEMONIC("pop rAX");
    /* The common worker handles REX.B extension and operand size. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xAX);
}
8433
8434
/** Opcode 0x59. */
FNIEMOP_DEF(iemOp_pop_eCX)
{
    IEMOP_MNEMONIC("pop rCX");
    /* The common worker handles REX.B extension and operand size. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xCX);
}
8441
8442
/** Opcode 0x5a. */
FNIEMOP_DEF(iemOp_pop_eDX)
{
    IEMOP_MNEMONIC("pop rDX");
    /* The common worker handles REX.B extension and operand size. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDX);
}
8449
8450
/** Opcode 0x5b. */
FNIEMOP_DEF(iemOp_pop_eBX)
{
    IEMOP_MNEMONIC("pop rBX");
    /* The common worker handles REX.B extension and operand size. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBX);
}
8457
8458
/** Opcode 0x5c.
 * POP rSP needs special care: the value popped must be stored into SP
 * *after* the stack pointer increment, so the common reference-based worker
 * cannot be used.  With REX.B the opcode addresses R12 instead and the
 * common worker is fine. */
FNIEMOP_DEF(iemOp_pop_eSP)
{
    IEMOP_MNEMONIC("pop rSP");
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        if (pIemCpu->uRexB)
            return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSP);
        /* 64-bit default operand size; 0x66 gives 16-bit, never 32-bit. */
        pIemCpu->enmDefOpSize = IEMMODE_64BIT;
        pIemCpu->enmEffOpSize = !(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
                           DISOPTYPE_HARMLESS | DISOPTYPE_DEFAULT_64_OP_SIZE | DISOPTYPE_REXB_EXTENDS_OPREG);
    /** @todo add testcase for this instruction. */
    /* Pop into a local first, then store it to SP, so the popped value
       overrides the incremented stack pointer. */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Dst);
            IEM_MC_POP_U16(&u16Dst); /** @todo not correct MC, fix later. */
            IEM_MC_STORE_GREG_U16(X86_GREG_xSP, u16Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Dst);
            IEM_MC_POP_U32(&u32Dst);
            IEM_MC_STORE_GREG_U32(X86_GREG_xSP, u32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Dst);
            IEM_MC_POP_U64(&u64Dst);
            IEM_MC_STORE_GREG_U64(X86_GREG_xSP, u64Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
8506
8507
/** Opcode 0x5d. */
FNIEMOP_DEF(iemOp_pop_eBP)
{
    IEMOP_MNEMONIC("pop rBP");
    /* The common worker handles REX.B extension and operand size. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBP);
}
8514
8515
/** Opcode 0x5e. */
FNIEMOP_DEF(iemOp_pop_eSI)
{
    IEMOP_MNEMONIC("pop rSI");
    /* The common worker handles REX.B extension and operand size. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSI);
}
8522
8523
/** Opcode 0x5f. */
FNIEMOP_DEF(iemOp_pop_eDI)
{
    IEMOP_MNEMONIC("pop rDI");
    /* The common worker handles REX.B extension and operand size. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDI);
}
8530
8531
/** Opcode 0x60.
 * PUSHA - push all general registers.  186+, invalid in 64-bit mode.
 * Deferred to a C implementation, selected by effective operand size. */
FNIEMOP_DEF(iemOp_pusha)
{
    IEMOP_MNEMONIC("pusha");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_NO_64BIT();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_16);
    /* 64-bit was excluded above, so only 32-bit remains here. */
    Assert(pIemCpu->enmEffOpSize == IEMMODE_32BIT);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_32);
}
8543
8544
/** Opcode 0x61.
 * POPA - pop all general registers.  186+, invalid in 64-bit mode.
 * Deferred to a C implementation, selected by effective operand size. */
FNIEMOP_DEF(iemOp_popa)
{
    IEMOP_MNEMONIC("popa");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_NO_64BIT();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_16);
    /* 64-bit was excluded above, so only 32-bit remains here. */
    Assert(pIemCpu->enmEffOpSize == IEMMODE_32BIT);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_32);
}
8556
8557
/** Opcode 0x62.
 * BOUND Gv,Ma (186+) / EVEX prefix — not implemented yet (stub raises the
 * usual not-implemented status). */
FNIEMOP_STUB(iemOp_bound_Gv_Ma_evex);
// IEMOP_HLP_MIN_186();
8561
8562
/** Opcode 0x63 - non-64-bit modes.
 * ARPL Ew,Gw - adjust the RPL field of the destination selector.  286+,
 * protected mode only (real and V86 mode raise \#UD via the helper below).
 * The decoder in 64-bit mode routes 0x63 to iemOp_movsxd_Gv_Ev instead. */
FNIEMOP_DEF(iemOp_arpl_Ew_Gw)
{
    IEMOP_MNEMONIC("arpl Ew,Gw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register */
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint16_t *, pu16Dst, 0);
        IEM_MC_ARG(uint16_t, u16Src, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        /* No REX extension here - ARPL only exists outside 64-bit mode. */
        IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* Memory: map the destination word read/write, run the assembly
           worker on the mapping, then commit both memory and EFLAGS. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *, pu16Dst, 0);
        IEM_MC_ARG(uint16_t, u16Src, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;

}
8612
8613
/** Opcode 0x63.
 * @note This is a weird one. It works like a regular move instruction if
 *       REX.W isn't set, at least according to AMD docs (rev 3.15, 2009-11).
 * @todo This definitely needs a testcase to verify the odd cases.  */
FNIEMOP_DEF(iemOp_movsxd_Gv_Ev)
{
    Assert(pIemCpu->enmEffOpSize == IEMMODE_64BIT); /* Caller branched already . */

    IEMOP_MNEMONIC("movsxd Gv,Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register to register: sign-extend the 32-bit source register into
         * the 64-bit destination register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, u64Value);
        IEM_MC_FETCH_GREG_U32_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading a register from memory: fetch a dword and
         * sign-extend it into the 64-bit destination register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, u64Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U32_SX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
8655
8656
/** Opcode 0x64.
 * FS segment override prefix (386+): record the prefix, set the effective
 * segment, and continue decoding with the next byte. */
FNIEMOP_DEF(iemOp_seg_FS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg fs");
    IEMOP_HLP_MIN_386();

    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_FS;
    pIemCpu->iEffSeg = X86_SREG_FS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
8669
8670
/** Opcode 0x65.
 * GS segment override prefix (386+): record the prefix, set the effective
 * segment, and continue decoding with the next byte. */
FNIEMOP_DEF(iemOp_seg_GS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg gs");
    IEMOP_HLP_MIN_386();

    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_GS;
    pIemCpu->iEffSeg = X86_SREG_GS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
8683
8684
/** Opcode 0x66.
 * Operand-size override prefix (386+): record the prefix, recalculate the
 * effective operand size, and continue decoding with the next byte. */
FNIEMOP_DEF(iemOp_op_size)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("op size");
    IEMOP_HLP_MIN_386();

    pIemCpu->fPrefixes |= IEM_OP_PRF_SIZE_OP;
    iemRecalEffOpSize(pIemCpu);

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
8697
8698
/** Opcode 0x67.
 * Address-size override prefix (386+): record the prefix, toggle the
 * effective address mode, and continue decoding with the next byte. */
FNIEMOP_DEF(iemOp_addr_size)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("addr size");
    IEMOP_HLP_MIN_386();

    pIemCpu->fPrefixes |= IEM_OP_PRF_SIZE_ADDR;
    /* 16-bit code gets 32-bit addressing and vice versa; 64-bit code gets
       32-bit addressing (16-bit is not reachable from long mode). */
    switch (pIemCpu->enmDefAddrMode)
    {
        case IEMMODE_16BIT: pIemCpu->enmEffAddrMode = IEMMODE_32BIT; break;
        case IEMMODE_32BIT: pIemCpu->enmEffAddrMode = IEMMODE_16BIT; break;
        case IEMMODE_64BIT: pIemCpu->enmEffAddrMode = IEMMODE_32BIT; break;
        default: AssertFailed();
    }

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
8717
8718
/** Opcode 0x68.
 * PUSH Iz - push an immediate sized by the effective operand size (186+).
 * In 64-bit mode the immediate is 32-bit and sign-extended to 64 bits. */
FNIEMOP_DEF(iemOp_push_Iz)
{
    IEMOP_MNEMONIC("push Iz");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U16(u16Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U32(u32Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            /* imm32 sign-extended to 64 bits. */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U64(u64Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
8763
8764
/** Opcode 0x69.
 * IMUL Gv,Ev,Iz (186+): three-operand signed multiply, Gv = Ev * Iz.
 * The multiply itself is done in a temporary and the result stored into the
 * destination register afterwards; SF/ZF/AF/PF are undefined per the
 * verification mask below.
 * @note In the memory cases the effective address must be calculated before
 *       the immediate is fetched, since the displacement bytes precede the
 *       immediate in the instruction stream (the trailing 2/4 argument to
 *       IEM_MC_CALC_RM_EFF_ADDR is the number of immediate bytes still to
 *       come). */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Iz)
{
    IEMOP_MNEMONIC("imul Gv,Ev,Iz"); /* Gv = Ev * Iz; */
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);

                IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* Effective address first (2 = imm16 bytes follow), then the
                   immediate. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);

                IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* Effective address first (4 = imm32 bytes follow), then the
                   immediate. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand; imm32 sign-extended to 64 bits */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);

                IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* Effective address first (4 = imm32 bytes follow), then the
                   sign-extended immediate. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }
    }
    AssertFailedReturn(VERR_IEM_IPE_9);
}
8924
8925
/** Opcode 0x6a.
 * PUSH Ib (186+): push a sign-extended byte immediate.  The int8_t value is
 * sign-extended to the push width by the implicit conversion in the
 * IEM_MC_PUSH_UXX calls below. */
FNIEMOP_DEF(iemOp_push_Ib)
{
    IEMOP_MNEMONIC("push Ib");
    IEMOP_HLP_MIN_186();
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0,0);
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_PUSH_U16(i8Imm);
            break;
        case IEMMODE_32BIT:
            IEM_MC_PUSH_U32(i8Imm);
            break;
        case IEMMODE_64BIT:
            IEM_MC_PUSH_U64(i8Imm);
            break;
    }
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8952
8953
8954/** Opcode 0x6b. */
8955FNIEMOP_DEF(iemOp_imul_Gv_Ev_Ib)
8956{
8957 IEMOP_MNEMONIC("imul Gv,Ev,Ib"); /* Gv = Ev * Iz; */
8958 IEMOP_HLP_MIN_186();
8959 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8960 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
8961
8962 switch (pIemCpu->enmEffOpSize)
8963 {
8964 case IEMMODE_16BIT:
8965 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8966 {
8967 /* register operand */
8968 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
8969 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8970
8971 IEM_MC_BEGIN(3, 1);
8972 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8973 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ (int8_t)u8Imm, 1);
8974 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8975 IEM_MC_LOCAL(uint16_t, u16Tmp);
8976
8977 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
8978 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
8979 IEM_MC_REF_EFLAGS(pEFlags);
8980 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
8981 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp);
8982
8983 IEM_MC_ADVANCE_RIP();
8984 IEM_MC_END();
8985 }
8986 else
8987 {
8988 /* memory operand */
8989 IEM_MC_BEGIN(3, 2);
8990 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8991 IEM_MC_ARG(uint16_t, u16Src, 1);
8992 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8993 IEM_MC_LOCAL(uint16_t, u16Tmp);
8994 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8995
8996 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
8997 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_S8_SX_U16(&u16Imm);
8998 IEM_MC_ASSIGN(u16Src, u16Imm);
8999 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9000 IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
9001 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
9002 IEM_MC_REF_EFLAGS(pEFlags);
9003 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
9004 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp);
9005
9006 IEM_MC_ADVANCE_RIP();
9007 IEM_MC_END();
9008 }
9009 return VINF_SUCCESS;
9010
9011 case IEMMODE_32BIT:
9012 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9013 {
9014 /* register operand */
9015 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
9016 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9017
9018 IEM_MC_BEGIN(3, 1);
9019 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9020 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ (int8_t)u8Imm, 1);
9021 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9022 IEM_MC_LOCAL(uint32_t, u32Tmp);
9023
9024 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
9025 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
9026 IEM_MC_REF_EFLAGS(pEFlags);
9027 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
9028 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);
9029
9030 IEM_MC_ADVANCE_RIP();
9031 IEM_MC_END();
9032 }
9033 else
9034 {
9035 /* memory operand */
9036 IEM_MC_BEGIN(3, 2);
9037 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9038 IEM_MC_ARG(uint32_t, u32Src, 1);
9039 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9040 IEM_MC_LOCAL(uint32_t, u32Tmp);
9041 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9042
9043 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
9044 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_S8_SX_U32(&u32Imm);
9045 IEM_MC_ASSIGN(u32Src, u32Imm);
9046 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9047 IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
9048 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
9049 IEM_MC_REF_EFLAGS(pEFlags);
9050 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
9051 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);
9052
9053 IEM_MC_ADVANCE_RIP();
9054 IEM_MC_END();
9055 }
9056 return VINF_SUCCESS;
9057
9058 case IEMMODE_64BIT:
9059 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9060 {
9061 /* register operand */
9062 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
9063 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9064
9065 IEM_MC_BEGIN(3, 1);
9066 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9067 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ (int8_t)u8Imm, 1);
9068 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9069 IEM_MC_LOCAL(uint64_t, u64Tmp);
9070
9071 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
9072 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
9073 IEM_MC_REF_EFLAGS(pEFlags);
9074 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
9075 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);
9076
9077 IEM_MC_ADVANCE_RIP();
9078 IEM_MC_END();
9079 }
9080 else
9081 {
9082 /* memory operand */
9083 IEM_MC_BEGIN(3, 2);
9084 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9085 IEM_MC_ARG(uint64_t, u64Src, 1);
9086 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9087 IEM_MC_LOCAL(uint64_t, u64Tmp);
9088 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9089
9090 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
9091 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S8_SX_U64(&u64Imm);
9092 IEM_MC_ASSIGN(u64Src, u64Imm);
9093 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9094 IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
9095 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
9096 IEM_MC_REF_EFLAGS(pEFlags);
9097 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
9098 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);
9099
9100 IEM_MC_ADVANCE_RIP();
9101 IEM_MC_END();
9102 }
9103 return VINF_SUCCESS;
9104 }
9105 AssertFailedReturn(VERR_IEM_IPE_8);
9106}
9107
9108
/** Opcode 0x6c. */
FNIEMOP_DEF(iemOp_insb_Yb_DX)
{
    /*
     * INS ES:[e/rDI], DX - read byte(s) from I/O port DX into the string
     * destination.  The work is deferred to a C implementation selected by
     * the effective address size; REPNZ is treated the same as REPZ (both
     * pick the repeated variant - string I/O does not distinguish them).
     */
    IEMOP_HLP_MIN_186();        /* instruction requires a 186 or later */
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep ins Yb,DX");
        switch (pIemCpu->enmEffAddrMode)
        {
            /* NOTE(review): the trailing 'false' looks like an "I/O permission
               already checked" flag - confirm against the iemCImpl_*ins* workers. */
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr16, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr32, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC("ins Yb,DX");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr16, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr32, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9137
9138
/** Opcode 0x6d. */
FNIEMOP_DEF(iemOp_inswd_Yv_DX)
{
    /*
     * INS ES:[e/rDI], DX - word/dword string input from I/O port DX, deferred
     * to C implementations selected by effective operand size and address
     * size.  A 64-bit operand size is served by the 32-bit workers (see the
     * IEMMODE_64BIT fall-through below); REPNZ is treated the same as REPZ.
     */
    IEMOP_HLP_MIN_186();        /* instruction requires a 186 or later */
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC("rep ins Yv,DX");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reached - every inner case returns */
            case IEMMODE_64BIT: /* 64-bit operand size uses the 32-bit workers */
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reached - every inner case returns */
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC("ins Yv,DX");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reached - every inner case returns */
            case IEMMODE_64BIT: /* 64-bit operand size uses the 32-bit workers */
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reached - every inner case returns */
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9199
9200
/** Opcode 0x6e. */
FNIEMOP_DEF(iemOp_outsb_Yb_DX)
{
    /*
     * OUTS DX, byte string - write byte(s) from the string source to I/O port
     * DX.  Deferred to a C implementation selected by the effective address
     * size.  Unlike INS, the source segment honours segment override prefixes,
     * hence pIemCpu->iEffSeg is passed along.  REPNZ is treated like REPZ.
     */
    IEMOP_HLP_MIN_186();        /* instruction requires a 186 or later */
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep outs DX,Yb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr16, pIemCpu->iEffSeg, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr32, pIemCpu->iEffSeg, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr64, pIemCpu->iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC("outs DX,Yb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr16, pIemCpu->iEffSeg, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr32, pIemCpu->iEffSeg, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr64, pIemCpu->iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9229
9230
/** Opcode 0x6f. */
FNIEMOP_DEF(iemOp_outswd_Yv_DX)
{
    /*
     * OUTS DX, word/dword string - deferred to C implementations selected by
     * effective operand size and address size.  A 64-bit operand size is
     * served by the 32-bit workers (see the IEMMODE_64BIT fall-through).
     * The source segment honours overrides, hence iEffSeg is passed along;
     * REPNZ is treated the same as REPZ.
     */
    IEMOP_HLP_MIN_186();        /* instruction requires a 186 or later */
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC("rep outs DX,Yv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr16, pIemCpu->iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr32, pIemCpu->iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr64, pIemCpu->iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reached - every inner case returns */
            case IEMMODE_64BIT: /* 64-bit operand size uses the 32-bit workers */
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr16, pIemCpu->iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr32, pIemCpu->iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr64, pIemCpu->iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reached - every inner case returns */
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC("outs DX,Yv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr16, pIemCpu->iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr32, pIemCpu->iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr64, pIemCpu->iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reached - every inner case returns */
            case IEMMODE_64BIT: /* 64-bit operand size uses the 32-bit workers */
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr16, pIemCpu->iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr32, pIemCpu->iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr64, pIemCpu->iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reached - every inner case returns */
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9291
9292
/** Opcode 0x70. */
FNIEMOP_DEF(iemOp_jo_Jb)
{
    /* JO rel8 - jump short if the overflow flag (OF) is set. */
    IEMOP_MNEMONIC("jo Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();              /* Jcc defaults to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9310
9311
/** Opcode 0x71. */
FNIEMOP_DEF(iemOp_jno_Jb)
{
    /* JNO rel8 - jump short if the overflow flag (OF) is clear.  Implemented
       as the inverse of JO: the taken/fall-through arms are swapped. */
    IEMOP_MNEMONIC("jno Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();              /* Jcc defaults to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9329
/** Opcode 0x72. */
FNIEMOP_DEF(iemOp_jc_Jb)
{
    /* JC/JB/JNAE rel8 - jump short if the carry flag (CF) is set. */
    IEMOP_MNEMONIC("jc/jnae Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();              /* Jcc defaults to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9347
9348
/** Opcode 0x73. */
FNIEMOP_DEF(iemOp_jnc_Jb)
{
    /* JNC/JNB/JAE rel8 - jump short if the carry flag (CF) is clear. */
    IEMOP_MNEMONIC("jnc/jnb Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();              /* Jcc defaults to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9366
9367
/** Opcode 0x74. */
FNIEMOP_DEF(iemOp_je_Jb)
{
    /* JE/JZ rel8 - jump short if the zero flag (ZF) is set. */
    IEMOP_MNEMONIC("je/jz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();              /* Jcc defaults to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9385
9386
/** Opcode 0x75. */
FNIEMOP_DEF(iemOp_jne_Jb)
{
    /* JNE/JNZ rel8 - jump short if the zero flag (ZF) is clear. */
    IEMOP_MNEMONIC("jne/jnz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();              /* Jcc defaults to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9404
9405
/** Opcode 0x76. */
FNIEMOP_DEF(iemOp_jbe_Jb)
{
    /* JBE/JNA rel8 - jump short if below or equal (CF=1 or ZF=1). */
    IEMOP_MNEMONIC("jbe/jna Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();              /* Jcc defaults to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9423
9424
/** Opcode 0x77. */
FNIEMOP_DEF(iemOp_jnbe_Jb)
{
    /* JNBE/JA rel8 - jump short if above (CF=0 and ZF=0). */
    IEMOP_MNEMONIC("jnbe/ja Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();              /* Jcc defaults to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9442
9443
/** Opcode 0x78. */
FNIEMOP_DEF(iemOp_js_Jb)
{
    /* JS rel8 - jump short if the sign flag (SF) is set. */
    IEMOP_MNEMONIC("js Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();              /* Jcc defaults to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9461
9462
/** Opcode 0x79. */
FNIEMOP_DEF(iemOp_jns_Jb)
{
    /* JNS rel8 - jump short if the sign flag (SF) is clear. */
    IEMOP_MNEMONIC("jns Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();              /* Jcc defaults to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9480
9481
/** Opcode 0x7a. */
FNIEMOP_DEF(iemOp_jp_Jb)
{
    /* JP/JPE rel8 - jump short if the parity flag (PF) is set. */
    IEMOP_MNEMONIC("jp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();              /* Jcc defaults to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9499
9500
/** Opcode 0x7b. */
FNIEMOP_DEF(iemOp_jnp_Jb)
{
    /* JNP/JPO rel8 - jump short if the parity flag (PF) is clear. */
    IEMOP_MNEMONIC("jnp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();              /* Jcc defaults to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9518
9519
/** Opcode 0x7c. */
FNIEMOP_DEF(iemOp_jl_Jb)
{
    /* JL/JNGE rel8 - jump short if less (signed): SF != OF. */
    IEMOP_MNEMONIC("jl/jnge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();              /* Jcc defaults to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9537
9538
/** Opcode 0x7d. */
FNIEMOP_DEF(iemOp_jnl_Jb)
{
    /* JNL/JGE rel8 - jump short if not less (signed): SF == OF. */
    IEMOP_MNEMONIC("jnl/jge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();              /* Jcc defaults to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9556
9557
/** Opcode 0x7e. */
FNIEMOP_DEF(iemOp_jle_Jb)
{
    /* JLE/JNG rel8 - jump short if less or equal (signed): ZF=1 or SF != OF. */
    IEMOP_MNEMONIC("jle/jng Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();              /* Jcc defaults to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9575
9576
/** Opcode 0x7f. */
FNIEMOP_DEF(iemOp_jnle_Jb)
{
    /* JNLE/JG rel8 - jump short if greater (signed): ZF=0 and SF == OF. */
    IEMOP_MNEMONIC("jnle/jg Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();              /* Jcc defaults to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9594
9595
/** Opcode 0x80. */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_80)
{
    /*
     * Group 1: ADD/OR/ADC/SBB/AND/SUB/XOR/CMP Eb, Ib.  The ModR/M reg field
     * selects the operation via g_apIemImplGrp1.  The mnemonic string below
     * is eight 4-byte-padded names indexed by reg*4 ("or" carries an extra
     * NUL to keep the stride).
     */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC2("add\0or\0\0adc\0sbb\0and\0sub\0xor\0cmp" + ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)*4, "Eb,Ib");
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();     /* LOCK is never valid with a register destination */
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,       pu8Dst,     0);
        IEM_MC_ARG_CONST(uint8_t,   u8Src, /*=*/ u8Imm, 1);
        IEM_MC_ARG(uint32_t *,      pEFlags,    2);

        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        uint32_t fAccess;
        if (pImpl->pfnLockedU8)         /* a locked worker exists for the read-modify-write ops */
            fAccess = IEM_ACCESS_DATA_RW;
        else
        { /* CMP - destination is only read, and LOCK is invalid */
            IEMOP_HLP_NO_LOCK_PREFIX();
            fAccess = IEM_ACCESS_DATA_R;
        }
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,       pu8Dst,     0);
        IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = immediate byte still to come */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_ARG_CONST(uint8_t,   u8Src, /*=*/ u8Imm, 1);

        IEM_MC_MEM_MAP(pu8Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
9654
9655
/** Opcode 0x81. */
FNIEMOP_DEF(iemOp_Grp1_Ev_Iz)
{
    /*
     * Group 1: ADD/OR/ADC/SBB/AND/SUB/XOR/CMP Ev, Iz.  The ModR/M reg field
     * selects the operation via g_apIemImplGrp1; the immediate is operand
     * sized (16/32 bits), except in 64-bit mode where a 32-bit immediate is
     * sign-extended to 64 bits.  CMP (no locked worker) maps the memory
     * destination read-only and rejects the LOCK prefix.
     */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC2("add\0or\0\0adc\0sbb\0and\0sub\0xor\0cmp" + ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)*4, "Ev,Iz");
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register target */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,    0);
                IEM_MC_ARG_CONST(uint16_t,  u16Src, /*=*/ u16Imm, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,    2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                uint32_t fAccess;
                if (pImpl->pfnLockedU16)
                    fAccess = IEM_ACCESS_DATA_RW;
                else
                { /* CMP, TEST */
                    IEMOP_HLP_NO_LOCK_PREFIX();
                    fAccess = IEM_ACCESS_DATA_R;
                }
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,      pu16Dst,    0);
                IEM_MC_ARG(uint16_t,        u16Src,     1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); /* 2 = immediate bytes still to come */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_32BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register target */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,    0);
                IEM_MC_ARG_CONST(uint32_t,  u32Src, /*=*/ u32Imm, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,    2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /* 32-bit writes zero the upper half in 64-bit mode */

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                uint32_t fAccess;
                if (pImpl->pfnLockedU32)
                    fAccess = IEM_ACCESS_DATA_RW;
                else
                { /* CMP, TEST */
                    IEMOP_HLP_NO_LOCK_PREFIX();
                    fAccess = IEM_ACCESS_DATA_R;
                }
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,      pu32Dst,    0);
                IEM_MC_ARG(uint32_t,        u32Src,     1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* 4 = immediate bytes still to come */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_64BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register target */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); /* imm32 sign-extended to 64 bits */
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,    0);
                IEM_MC_ARG_CONST(uint64_t,  u64Src, /*=*/ u64Imm, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,    2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                uint32_t fAccess;
                if (pImpl->pfnLockedU64)
                    fAccess = IEM_ACCESS_DATA_RW;
                else
                { /* CMP */
                    IEMOP_HLP_NO_LOCK_PREFIX();
                    fAccess = IEM_ACCESS_DATA_R;
                }
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,      pu64Dst,    0);
                IEM_MC_ARG(uint64_t,        u64Src,     1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* 4 = immediate bytes still to come */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); /* imm32 sign-extended to 64 bits */
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            break;
        }
    }
    return VINF_SUCCESS;
}
9830
9831
/** Opcode 0x82. */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_82)
{
    /* 0x82 is an undocumented alias of 0x80 (Group 1 Eb,Ib) that is invalid
       in 64-bit mode; after that check it simply reuses the 0x80 decoder. */
    IEMOP_HLP_NO_64BIT(); /** @todo do we need to decode the whole instruction or is this ok? */
    return FNIEMOP_CALL(iemOp_Grp1_Eb_Ib_80);
}
9838
9839
/** Opcode 0x83. */
FNIEMOP_DEF(iemOp_Grp1_Ev_Ib)
{
    /*
     * Group 1: ADD/OR/ADC/SBB/AND/SUB/XOR/CMP Ev, Ib.  The 8-bit immediate
     * is sign-extended to the effective operand size (note the (int8_t)
     * casts below).  The ModR/M reg field selects the operation via
     * g_apIemImplGrp1; CMP (no locked worker) maps the memory destination
     * read-only and rejects the LOCK prefix.
     */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC2("add\0or\0\0adc\0sbb\0and\0sub\0xor\0cmp" + ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)*4, "Ev,Ib");
    /* Note! Seems the OR, AND, and XOR instructions are present on CPUs prior
             to the 386 even if absent in the intel reference manuals and some
             3rd party opcode listings. */
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register target
         */
        IEMOP_HLP_NO_LOCK_PREFIX();
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,    0);
                IEM_MC_ARG_CONST(uint16_t,  u16Src, /*=*/ (int8_t)u8Imm,1); /* sign-extend imm8 */
                IEM_MC_ARG(uint32_t *,      pEFlags,    2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,    0);
                IEM_MC_ARG_CONST(uint32_t,  u32Src, /*=*/ (int8_t)u8Imm,1); /* sign-extend imm8 */
                IEM_MC_ARG(uint32_t *,      pEFlags,    2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /* 32-bit writes zero the upper half in 64-bit mode */

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,    0);
                IEM_MC_ARG_CONST(uint64_t,  u64Src, /*=*/ (int8_t)u8Imm,1); /* sign-extend imm8 */
                IEM_MC_ARG(uint32_t *,      pEFlags,    2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }
        }
    }
    else
    {
        /*
         * Memory target.
         */
        uint32_t fAccess;
        if (pImpl->pfnLockedU16)
            fAccess = IEM_ACCESS_DATA_RW;
        else
        { /* CMP */
            IEMOP_HLP_NO_LOCK_PREFIX();
            fAccess = IEM_ACCESS_DATA_R;
        }

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,      pu16Dst,    0);
                IEM_MC_ARG(uint16_t,        u16Src,     1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = immediate byte still to come */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm); /* sign-extend imm8 */
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,      pu32Dst,    0);
                IEM_MC_ARG(uint32_t,        u32Src,     1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = immediate byte still to come */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm); /* sign-extend imm8 */
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,      pu64Dst,    0);
                IEM_MC_ARG(uint64_t,        u64Src,     1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = immediate byte still to come */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm); /* sign-extend imm8 */
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }
        }
    }
    return VINF_SUCCESS;
}
10003
10004
/** Opcode 0x84. */
FNIEMOP_DEF(iemOp_test_Eb_Gb)
{
    /* TEST Eb,Gb - AND without writing the destination; reuses the generic
       byte rm,r8 binary-operator decoder with the TEST worker table. */
    IEMOP_MNEMONIC("test Eb,Gb");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo do we have to decode the whole instruction first? */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is architecturally undefined after TEST */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_test);
}
10013
10014
/** Opcode 0x85. */
FNIEMOP_DEF(iemOp_test_Ev_Gv)
{
    /* TEST Ev,Gv - AND without writing the destination; reuses the generic
       word/dword/qword rm,rv binary-operator decoder with the TEST workers. */
    IEMOP_MNEMONIC("test Ev,Gv");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo do we have to decode the whole instruction first? */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is architecturally undefined after TEST */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_test);
}
10023
10024
/** Opcode 0x86. */
FNIEMOP_DEF(iemOp_xchg_Eb_Gb)
{
    /*
     * XCHG Eb, Gb - swap a byte register with another byte register or with
     * a memory byte.  Note the register path rejects LOCK while the memory
     * path does not: XCHG with a memory operand permits the prefix
     * (architecturally the memory form is locked regardless - not enforced
     * here beyond the RW mapping; confirm against iemAImpl_xchg_u8).
     */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC("xchg Eb,Gb");

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, uTmp1);
        IEM_MC_LOCAL(uint8_t, uTmp2);

        /* Swap via two temporaries: fetch both, store crosswise. */
        IEM_MC_FETCH_GREG_U8(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_FETCH_GREG_U8(uTmp2, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, uTmp1);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uTmp2);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory: map the memory byte read-write and let the
         * assembly worker swap it with the register referenced in place.
         */
/** @todo the register must be committed separately! */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint8_t *,  pu8Mem,           0);
        IEM_MC_ARG(uint8_t *,  pu8Reg,           1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); /* 0 = no immediate follows */
        IEM_MC_MEM_MAP(pu8Mem, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8, pu8Mem, pu8Reg);
        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Mem, IEM_ACCESS_DATA_RW);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
10072
10073
/** Opcode 0x87.
 * XCHG Ev,Gv - exchange a word/dword/qword register with a register or
 * memory operand, sized by the effective operand size.  No flags are
 * modified. */
FNIEMOP_DEF(iemOp_xchg_Ev_Gv)
{
    IEMOP_MNEMONIC("xchg Ev,Gv");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX();

        /* Register to register: fetch both values and store them crosswise.
           The 32-bit stores implicitly clear the high dword in 64-bit mode. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, uTmp1);
                IEM_MC_LOCAL(uint16_t, uTmp2);

                IEM_MC_FETCH_GREG_U16(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U16(uTmp2, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, uTmp1);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, uTmp1);
                IEM_MC_LOCAL(uint32_t, uTmp2);

                IEM_MC_FETCH_GREG_U32(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U32(uTmp2, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, uTmp1);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, uTmp1);
                IEM_MC_LOCAL(uint64_t, uTmp2);

                IEM_MC_FETCH_GREG_U64(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U64(uTmp2, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, uTmp1);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory: map the operand read/write and swap it
         * with the register through the assembly helper.
         */
        switch (pIemCpu->enmEffOpSize)
        {
/** @todo the register must be committed separately! */
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint16_t *, pu16Mem, 0);
                IEM_MC_ARG(uint16_t *, pu16Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu16Mem, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16, pu16Mem, pu16Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Mem, IEM_ACCESS_DATA_RW);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint32_t *, pu32Mem, 0);
                IEM_MC_ARG(uint32_t *, pu32Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu32Mem, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32, pu32Mem, pu32Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Mem, IEM_ACCESS_DATA_RW);

                /* Writing through the reference does not zero the high dword,
                   so clear it explicitly (32-bit ops zero-extend in 64-bit mode). */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint64_t *, pu64Mem, 0);
                IEM_MC_ARG(uint64_t *, pu64Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu64Mem, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64, pu64Mem, pu64Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Mem, IEM_ACCESS_DATA_RW);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
10195
10196
/** Opcode 0x88.
 * MOV Eb,Gb - store a byte register into a byte register or memory
 * operand. */
FNIEMOP_DEF(iemOp_mov_Eb_Gb)
{
    IEMOP_MNEMONIC("mov Eb,Gb");

    uint8_t bRm;
    IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register to register copy. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_STORE_MEM_U8(pIemCpu->iEffSeg, GCPtrEffDst, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;

}
10235
10236
/** Opcode 0x89.
 * MOV Ev,Gv - store a word/dword/qword register into a register or
 * memory operand, sized by the effective operand size. */
FNIEMOP_DEF(iemOp_mov_Ev_Gv)
{
    IEMOP_MNEMONIC("mov Ev,Gv");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrEffDst, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffDst, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
10323
10324
/** Opcode 0x8a.
 * MOV Gb,Eb - load a byte register from a byte register or memory
 * operand. */
FNIEMOP_DEF(iemOp_mov_Gb_Eb)
{
    IEMOP_MNEMONIC("mov Gb,Eb");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register to register copy. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_MEM_U8(u8Value, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
10361
10362
/** Opcode 0x8b.
 * MOV Gv,Ev - load a word/dword/qword register from a register or memory
 * operand, sized by the effective operand size. */
FNIEMOP_DEF(iemOp_mov_Gv_Ev)
{
    IEMOP_MNEMONIC("mov Gv,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
10449
10450
10451/** Opcode 0x63. */
10452FNIEMOP_DEF(iemOp_arpl_Ew_Gw_movsx_Gv_Ev)
10453{
10454 if (pIemCpu->enmCpuMode != IEMMODE_64BIT)
10455 return FNIEMOP_CALL(iemOp_arpl_Ew_Gw);
10456 if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
10457 return FNIEMOP_CALL(iemOp_mov_Gv_Ev);
10458 return FNIEMOP_CALL(iemOp_movsxd_Gv_Ev);
10459}
10460
10461
/** Opcode 0x8c.
 * MOV Ev,Sw - store a segment register into a general register or a
 * 16-bit memory operand. */
FNIEMOP_DEF(iemOp_mov_Ev_Sw)
{
    IEMOP_MNEMONIC("mov Ev,Sw");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * Check that the destination register exists. The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
    if (   iSegReg > X86_SREG_GS)
        return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     * In that case, the operand size is respected and the upper bits are
     * cleared (starting with some pentium).
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                /* Zero-extended store for 32-bit operand size. */
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_SREG_ZX_U32(u32Value, iSegReg);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                /* Zero-extended store for 64-bit operand size. */
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_SREG_ZX_U64(u64Value, iSegReg);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're saving the register to memory.  The access is word sized
         * regardless of operand size prefixes.
         */
#if 0 /* not necessary */
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_16BIT;
#endif
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
        IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
10534
10535
10536
10537
/** Opcode 0x8d.
 * LEA Gv,M - store the effective address of the memory operand in a
 * general register; truncated to the effective operand size.  A register
 * form (mod=3) is invalid and raises \#UD. */
FNIEMOP_DEF(iemOp_lea_Gv_M)
{
    IEMOP_MNEMONIC("lea Gv,M");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE(); /* no register form */

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            /* Truncate the effective address to 16 bits before storing. */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR,  GCPtrEffSrc);
            IEM_MC_LOCAL(uint16_t, u16Cast);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEM_MC_ASSIGN_TO_SMALLER(u16Cast, GCPtrEffSrc);
            IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Cast);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            /* Truncate the effective address to 32 bits before storing. */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR,  GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Cast);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEM_MC_ASSIGN_TO_SMALLER(u32Cast, GCPtrEffSrc);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Cast);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, GCPtrEffSrc);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
    }
    AssertFailedReturn(VERR_IEM_IPE_7);
}
10582
10583
/** Opcode 0x8e.
 * MOV Sw,Ev - load a segment register from a general register or a
 * 16-bit memory operand.  CS as destination is invalid (\#UD).  The
 * actual segment load (descriptor checks etc.) is done by the
 * iemCImpl_load_SReg C implementation. */
FNIEMOP_DEF(iemOp_mov_Sw_Ev)
{
    IEMOP_MNEMONIC("mov Sw,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * The practical operand size is 16-bit.
     */
#if 0 /* not necessary */
    pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_16BIT;
#endif

    /*
     * Check that the destination register exists and can be used with this
     * instruction.  The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
    if (   iSegReg == X86_SREG_CS
        || iSegReg > X86_SREG_GS)
        return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
        IEM_MC_ARG(uint16_t, u16Value, 1);
        IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading the register from memory.  The access is word sized
         * regardless of operand size prefixes.
         */
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
        IEM_MC_ARG(uint16_t, u16Value, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_MEM_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
10637
10638
/** Opcode 0x8f /0.
 * POP Ev - pop the top of the stack into a register or memory operand.
 *
 * The memory form is tricky because Intel documents RSP as being
 * incremented BEFORE the effective address is calculated, hence the
 * double decode of the R/M bytes below. */
FNIEMOP_DEF_1(iemOp_pop_Ev, uint8_t, bRm)
{
    /* This bugger is rather annoying as it requires rSP to be updated before
       doing the effective address calculations.  Will eventually require a
       split between the R/M+SIB decoding and the effective address
       calculation - which is something that is required for any attempt at
       reusing this code for a recompiler.  It may also be good to have if we
       need to delay #UD exception caused by invalid lock prefixes.

       For now, we'll do a mostly safe interpreter-only implementation here. */
    /** @todo What's the deal with the 'reg' field and pop Ev?  Ignorning it for
     *        now until tests show it's checked.. */
    IEMOP_MNEMONIC("pop Ev");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /* Register access is relatively easy and can share code. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(iemOpCommonPopGReg, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);

    /*
     * Memory target.
     *
     * Intel says that RSP is incremented before it's used in any effective
     * address calcuations.  This means some serious extra annoyance here since
     * we decode and calculate the effective address in one step and like to
     * delay committing registers till everything is done.
     *
     * So, we'll decode and calculate the effective address twice.  This will
     * require some recoding if turned into a recompiler.
     */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* The common code does this differently. */

#ifndef TST_IEM_CHECK_MC
    /* Calc effective address with modified ESP. */
    /* First pass: decode the R/M bytes so the opcode reader is positioned
       correctly; the opcode offset is saved/restored around it so the
       second pass re-reads the same bytes. */
    uint8_t const   offOpcodeSaved = pIemCpu->offOpcode;
    RTGCPTR         GCPtrEff;
    VBOXSTRICTRC    rcStrict;
    rcStrict = iemOpHlpCalcRmEffAddr(pIemCpu, bRm, 0, &GCPtrEff);
    if (rcStrict != VINF_SUCCESS)
        return rcStrict;
    pIemCpu->offOpcode = offOpcodeSaved;

    /* Second pass: temporarily advance RSP by the operand size, recalculate
       the effective address, then restore RSP (nothing is committed yet). */
    PCPUMCTX        pCtx = pIemCpu->CTX_SUFF(pCtx);
    uint64_t const  RspSaved = pCtx->rsp;
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT: iemRegAddToRsp(pIemCpu, pCtx, 2); break;
        case IEMMODE_32BIT: iemRegAddToRsp(pIemCpu, pCtx, 4); break;
        case IEMMODE_64BIT: iemRegAddToRsp(pIemCpu, pCtx, 8); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    rcStrict = iemOpHlpCalcRmEffAddr(pIemCpu, bRm, 0, &GCPtrEff);
    Assert(rcStrict == VINF_SUCCESS);
    pCtx->rsp = RspSaved;

    /* Perform the operation - this should be CImpl. */
    /* Pop into a temporary and store to memory; RSP is only committed on
       full success so a faulting store leaves the stack pointer untouched. */
    RTUINT64U TmpRsp;
    TmpRsp.u = pCtx->rsp;
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Value;
            rcStrict = iemMemStackPopU16Ex(pIemCpu, &u16Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU16(pIemCpu, pIemCpu->iEffSeg, GCPtrEff, u16Value);
            break;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Value;
            rcStrict = iemMemStackPopU32Ex(pIemCpu, &u32Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU32(pIemCpu, pIemCpu->iEffSeg, GCPtrEff, u32Value);
            break;
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Value;
            rcStrict = iemMemStackPopU64Ex(pIemCpu, &u64Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU64(pIemCpu, pIemCpu->iEffSeg, GCPtrEff, u64Value);
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict == VINF_SUCCESS)
    {
        pCtx->rsp = TmpRsp.u;
        iemRegUpdateRipAndClearRF(pIemCpu);
    }
    return rcStrict;

#else
    return VERR_IEM_IPE_2;
#endif
}
10740
10741
10742/** Opcode 0x8f. */
10743FNIEMOP_DEF(iemOp_Grp1A)
10744{
10745 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10746 if ((bRm & X86_MODRM_REG_MASK) == (0 << X86_MODRM_REG_SHIFT)) /* /0 */
10747 return FNIEMOP_CALL_1(iemOp_pop_Ev, bRm);
10748
10749 /* AMD has defined /1 thru /7 as XOP prefix (similar to three byte VEX). */
10750 /** @todo XOP decoding. */
10751 IEMOP_MNEMONIC("3-byte-xop");
10752 return IEMOP_RAISE_INVALID_OPCODE();
10753}
10754
10755
/**
 * Common 'xchg reg,rAX' helper.
 *
 * Exchanges rAX/eAX/AX with the general register given by @a iReg (opcode
 * low nibble), extended with REX.B.  Used by opcodes 0x90-0x97.
 */
FNIEMOP_DEF_1(iemOpCommonXchgGRegRax, uint8_t, iReg)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /* REX.B extends the register index to r8-r15. */
    iReg |= pIemCpu->uRexB;
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint16_t, u16Tmp1);
            IEM_MC_LOCAL(uint16_t, u16Tmp2);
            IEM_MC_FETCH_GREG_U16(u16Tmp1, iReg);
            IEM_MC_FETCH_GREG_U16(u16Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp1);
            IEM_MC_STORE_GREG_U16(iReg, u16Tmp2);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Tmp1);
            IEM_MC_LOCAL(uint32_t, u32Tmp2);
            IEM_MC_FETCH_GREG_U32(u32Tmp1, iReg);
            IEM_MC_FETCH_GREG_U32(u32Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp1);
            IEM_MC_STORE_GREG_U32(iReg, u32Tmp2);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Tmp1);
            IEM_MC_LOCAL(uint64_t, u64Tmp2);
            IEM_MC_FETCH_GREG_U64(u64Tmp1, iReg);
            IEM_MC_FETCH_GREG_U64(u64Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp1);
            IEM_MC_STORE_GREG_U64(iReg, u64Tmp2);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10805
10806
10807/** Opcode 0x90. */
10808FNIEMOP_DEF(iemOp_nop)
10809{
10810 /* R8/R8D and RAX/EAX can be exchanged. */
10811 if (pIemCpu->fPrefixes & IEM_OP_PRF_REX_B)
10812 {
10813 IEMOP_MNEMONIC("xchg r8,rAX");
10814 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xAX);
10815 }
10816
10817 if (pIemCpu->fPrefixes & IEM_OP_PRF_LOCK)
10818 IEMOP_MNEMONIC("pause");
10819 else
10820 IEMOP_MNEMONIC("nop");
10821 IEM_MC_BEGIN(0, 0);
10822 IEM_MC_ADVANCE_RIP();
10823 IEM_MC_END();
10824 return VINF_SUCCESS;
10825}
10826
10827
/** Opcode 0x91.
 * XCHG rCX,rAX - defers to the common 0x90-0x97 helper. */
FNIEMOP_DEF(iemOp_xchg_eCX_eAX)
{
    IEMOP_MNEMONIC("xchg rCX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xCX);
}
10834
10835
/** Opcode 0x92.
 * XCHG rDX,rAX - defers to the common 0x90-0x97 helper. */
FNIEMOP_DEF(iemOp_xchg_eDX_eAX)
{
    IEMOP_MNEMONIC("xchg rDX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDX);
}
10842
10843
/** Opcode 0x93.
 * XCHG rBX,rAX - defers to the common 0x90-0x97 helper. */
FNIEMOP_DEF(iemOp_xchg_eBX_eAX)
{
    IEMOP_MNEMONIC("xchg rBX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBX);
}
10850
10851
10852/** Opcode 0x94. */
10853FNIEMOP_DEF(iemOp_xchg_eSP_eAX)
10854{
10855 IEMOP_MNEMONIC("xchg rSX,rAX");
10856 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSP);
10857}
10858
10859
/** Opcode 0x95.
 * XCHG rBP,rAX - defers to the common 0x90-0x97 helper. */
FNIEMOP_DEF(iemOp_xchg_eBP_eAX)
{
    IEMOP_MNEMONIC("xchg rBP,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBP);
}
10866
10867
/** Opcode 0x96.
 * XCHG rSI,rAX - defers to the common 0x90-0x97 helper. */
FNIEMOP_DEF(iemOp_xchg_eSI_eAX)
{
    IEMOP_MNEMONIC("xchg rSI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSI);
}
10874
10875
/** Opcode 0x97.
 * XCHG rDI,rAX - defers to the common 0x90-0x97 helper. */
FNIEMOP_DEF(iemOp_xchg_eDI_eAX)
{
    IEMOP_MNEMONIC("xchg rDI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDI);
}
10882
10883
/** Opcode 0x98.
 * CBW/CWDE/CDQE - sign extend AL into AX, AX into EAX, or EAX into RAX,
 * depending on the effective operand size.  Implemented by testing the
 * sign bit of the source and OR-ing/AND-ing the destination half. */
FNIEMOP_DEF(iemOp_cbw)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEMOP_MNEMONIC("cbw");
            IEM_MC_BEGIN(0, 1);
            /* Replicate AL bit 7 into AH. */
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 7) {
                IEM_MC_OR_GREG_U16(X86_GREG_xAX, UINT16_C(0xff00));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U16(X86_GREG_xAX, UINT16_C(0x00ff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEMOP_MNEMONIC("cwde");
            IEM_MC_BEGIN(0, 1);
            /* Replicate AX bit 15 into the upper half of EAX. */
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
                IEM_MC_OR_GREG_U32(X86_GREG_xAX, UINT32_C(0xffff0000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U32(X86_GREG_xAX, UINT32_C(0x0000ffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEMOP_MNEMONIC("cdqe");
            IEM_MC_BEGIN(0, 1);
            /* Replicate EAX bit 31 into the upper half of RAX. */
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
                IEM_MC_OR_GREG_U64(X86_GREG_xAX, UINT64_C(0xffffffff00000000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U64(X86_GREG_xAX, UINT64_C(0x00000000ffffffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10929
10930
/** Opcode 0x99.
 * CWD/CDQ/CQO - sign extend AX/EAX/RAX into DX/EDX/RDX, depending on the
 * effective operand size: rDX is set to all ones or all zeros based on
 * the sign bit of rAX. */
FNIEMOP_DEF(iemOp_cwd)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEMOP_MNEMONIC("cwd");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, UINT16_C(0xffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEMOP_MNEMONIC("cdq");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, UINT32_C(0xffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEMOP_MNEMONIC("cqo");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 63) {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, UINT64_C(0xffffffffffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10976
10977
/** Opcode 0x9a.
 * CALL Ap - direct far call with an immediate seg:offset pointer.
 * Invalid in 64-bit mode; the heavy lifting is deferred to the
 * iemCImpl_callf C implementation. */
FNIEMOP_DEF(iemOp_call_Ap)
{
    IEMOP_MNEMONIC("call Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    uint32_t offSeg;
    if (pIemCpu->enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&offSeg);
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
    uint16_t uSel;  IEM_OPCODE_GET_NEXT_U16(&uSel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_callf, uSel, offSeg, pIemCpu->enmEffOpSize);
}
10994
10995
/** Opcode 0x9b. (aka fwait)
 * WAIT/FWAIT - check for pending unmasked FPU exceptions (and device-not-
 * available conditions); otherwise a no-op. */
FNIEMOP_DEF(iemOp_wait)
{
    IEMOP_MNEMONIC("wait");
    IEMOP_HLP_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
11009
11010
/** Opcode 0x9c.
 * PUSHF/PUSHFD/PUSHFQ - push the flags register; deferred to the
 * iemCImpl_pushf C implementation. */
FNIEMOP_DEF(iemOp_pushf_Fv)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_pushf, pIemCpu->enmEffOpSize);
}
11018
11019
/** Opcode 0x9d.
 * POPF/POPFD/POPFQ - pop the flags register; deferred to the
 * iemCImpl_popf C implementation. */
FNIEMOP_DEF(iemOp_popf_Fv)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_popf, pIemCpu->enmEffOpSize);
}
11027
11028
/** Opcode 0x9e.
 * SAHF - store AH into the low byte of EFLAGS (SF, ZF, AF, PF, CF).
 * In 64-bit mode this is only valid when CPUID reports LAHF/SAHF
 * support. */
FNIEMOP_DEF(iemOp_sahf)
{
    IEMOP_MNEMONIC("sahf");
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (   pIemCpu->enmCpuMode == IEMMODE_64BIT
        && !IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fLahfSahf)
        return IEMOP_RAISE_INVALID_OPCODE();
    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint32_t, u32Flags);
    IEM_MC_LOCAL(uint32_t, EFlags);
    IEM_MC_FETCH_EFLAGS(EFlags);
    IEM_MC_FETCH_GREG_U8_ZX_U32(u32Flags, X86_GREG_xSP/*=AH*/);
    /* Only the five arithmetic status flags come from AH... */
    IEM_MC_AND_LOCAL_U32(u32Flags, X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    /* ...which replace bits 0-7 of EFLAGS; bit 1 is always set. */
    IEM_MC_AND_LOCAL_U32(EFlags, UINT32_C(0xffffff00));
    IEM_MC_OR_LOCAL_U32(u32Flags, X86_EFL_1);
    IEM_MC_OR_2LOCS_U32(EFlags, u32Flags);
    IEM_MC_COMMIT_EFLAGS(EFlags);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
11051
11052
/**
 * Opcode 0x9f - lahf.
 *
 * Stores the low byte of EFLAGS into AH. Invalid opcode in 64-bit mode
 * unless the guest CPU reports LAHF/SAHF support.
 */
FNIEMOP_DEF(iemOp_lahf)
{
    IEMOP_MNEMONIC("lahf");
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (   pIemCpu->enmCpuMode == IEMMODE_64BIT
        && !IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fLahfSahf)
        return IEMOP_RAISE_INVALID_OPCODE();
    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(uint8_t, u8Flags);
    IEM_MC_FETCH_EFLAGS_U8(u8Flags);
    IEM_MC_STORE_GREG_U8(X86_GREG_xSP/*=AH*/, u8Flags);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
11069
11070
11071/**
11072 * Macro used by iemOp_mov_Al_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL and
11073 * iemOp_mov_Ov_rAX to fetch the moffsXX bit of the opcode and fend of lock
11074 * prefixes. Will return on failures.
11075 * @param a_GCPtrMemOff The variable to store the offset in.
11076 */
11077#define IEMOP_FETCH_MOFFS_XX(a_GCPtrMemOff) \
11078 do \
11079 { \
11080 switch (pIemCpu->enmEffAddrMode) \
11081 { \
11082 case IEMMODE_16BIT: \
11083 IEM_OPCODE_GET_NEXT_U16_ZX_U64(&(a_GCPtrMemOff)); \
11084 break; \
11085 case IEMMODE_32BIT: \
11086 IEM_OPCODE_GET_NEXT_U32_ZX_U64(&(a_GCPtrMemOff)); \
11087 break; \
11088 case IEMMODE_64BIT: \
11089 IEM_OPCODE_GET_NEXT_U64(&(a_GCPtrMemOff)); \
11090 break; \
11091 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
11092 } \
11093 IEMOP_HLP_NO_LOCK_PREFIX(); \
11094 } while (0)
11095
11096/** Opcode 0xa0. */
11097FNIEMOP_DEF(iemOp_mov_Al_Ob)
11098{
11099 /*
11100 * Get the offset and fend of lock prefixes.
11101 */
11102 RTGCPTR GCPtrMemOff;
11103 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
11104
11105 /*
11106 * Fetch AL.
11107 */
11108 IEM_MC_BEGIN(0,1);
11109 IEM_MC_LOCAL(uint8_t, u8Tmp);
11110 IEM_MC_FETCH_MEM_U8(u8Tmp, pIemCpu->iEffSeg, GCPtrMemOff);
11111 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
11112 IEM_MC_ADVANCE_RIP();
11113 IEM_MC_END();
11114 return VINF_SUCCESS;
11115}
11116
11117
/**
 * Opcode 0xa1 - mov rAX, moffs16/32/64.
 *
 * Loads AX/EAX/RAX (by effective operand size) from the absolute offset
 * encoded in the instruction (relative to the effective segment).
 */
FNIEMOP_DEF(iemOp_mov_rAX_Ov)
{
    /*
     * Get the offset and fend of lock prefixes.
     */
    IEMOP_MNEMONIC("mov rAX,Ov");
    RTGCPTR GCPtrMemOff;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);

    /*
     * Fetch rAX.
     */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint16_t, u16Tmp);
            IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11163
11164
11165/** Opcode 0xa2. */
11166FNIEMOP_DEF(iemOp_mov_Ob_AL)
11167{
11168 /*
11169 * Get the offset and fend of lock prefixes.
11170 */
11171 RTGCPTR GCPtrMemOff;
11172 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
11173
11174 /*
11175 * Store AL.
11176 */
11177 IEM_MC_BEGIN(0,1);
11178 IEM_MC_LOCAL(uint8_t, u8Tmp);
11179 IEM_MC_FETCH_GREG_U8(u8Tmp, X86_GREG_xAX);
11180 IEM_MC_STORE_MEM_U8(pIemCpu->iEffSeg, GCPtrMemOff, u8Tmp);
11181 IEM_MC_ADVANCE_RIP();
11182 IEM_MC_END();
11183 return VINF_SUCCESS;
11184}
11185
11186
11187/** Opcode 0xa3. */
11188FNIEMOP_DEF(iemOp_mov_Ov_rAX)
11189{
11190 /*
11191 * Get the offset and fend of lock prefixes.
11192 */
11193 RTGCPTR GCPtrMemOff;
11194 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
11195
11196 /*
11197 * Store rAX.
11198 */
11199 switch (pIemCpu->enmEffOpSize)
11200 {
11201 case IEMMODE_16BIT:
11202 IEM_MC_BEGIN(0,1);
11203 IEM_MC_LOCAL(uint16_t, u16Tmp);
11204 IEM_MC_FETCH_GREG_U16(u16Tmp, X86_GREG_xAX);
11205 IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrMemOff, u16Tmp);
11206 IEM_MC_ADVANCE_RIP();
11207 IEM_MC_END();
11208 return VINF_SUCCESS;
11209
11210 case IEMMODE_32BIT:
11211 IEM_MC_BEGIN(0,1);
11212 IEM_MC_LOCAL(uint32_t, u32Tmp);
11213 IEM_MC_FETCH_GREG_U32(u32Tmp, X86_GREG_xAX);
11214 IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrMemOff, u32Tmp);
11215 IEM_MC_ADVANCE_RIP();
11216 IEM_MC_END();
11217 return VINF_SUCCESS;
11218
11219 case IEMMODE_64BIT:
11220 IEM_MC_BEGIN(0,1);
11221 IEM_MC_LOCAL(uint64_t, u64Tmp);
11222 IEM_MC_FETCH_GREG_U64(u64Tmp, X86_GREG_xAX);
11223 IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrMemOff, u64Tmp);
11224 IEM_MC_ADVANCE_RIP();
11225 IEM_MC_END();
11226 return VINF_SUCCESS;
11227
11228 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11229 }
11230}
11231
/**
 * Macro used by iemOp_movsb_Xb_Yb and iemOp_movswd_Xv_Yv.
 *
 * Moves one element of ValBits bits from DS(or override):[rSI] to ES:[rDI]
 * and advances both index registers by the element size, direction
 * depending on EFLAGS.DF. AddrBits selects which widths of rSI/rDI are
 * used (zero extended to 64 bits).
 */
#define IEM_MOVS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR,           uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, pIemCpu->iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END();
11250
/**
 * Opcode 0xa4 - movsb.
 *
 * With a rep prefix the whole string operation is deferred to a C
 * implementation selected by the effective address size; otherwise a
 * single byte is moved via the IEM_MOVS_CASE worker.
 */
FNIEMOP_DEF(iemOp_movsb_Xb_Yb)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep movsb Xb,Yb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr16, pIemCpu->iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr32, pIemCpu->iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr64, pIemCpu->iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("movsb Xb,Yb");

    /*
     * Sharing case implementation with movs[wdq] below.
     */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_MOVS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_MOVS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_MOVS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
11284
11285
/**
 * Opcode 0xa5 - movsw/movsd/movsq.
 *
 * With a rep prefix the whole string operation is deferred to a C
 * implementation selected by effective operand and address size;
 * otherwise a single element is moved via the IEM_MOVS_CASE worker
 * shared with movsb.
 */
FNIEMOP_DEF(iemOp_movswd_Xv_Yv)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep movs Xv,Yv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* not reached: every address-mode case above returns */
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /* 16-bit addressing cannot be encoded in 64-bit mode */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("movs Xv,Yv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with movsb.
     */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_MOVS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_MOVS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
11368
11369#undef IEM_MOVS_CASE
11370
/**
 * Macro used by iemOp_cmpsb_Xb_Yb and iemOp_cmpswd_Xv_Yv.
 *
 * Compares one ValBits-bit element at DS(or override):[rSI] with the one at
 * ES:[rDI] via the cmp assembly worker (updating EFLAGS), then advances
 * both index registers by the element size, direction depending on
 * EFLAGS.DF as sampled after the compare.
 */
#define IEM_CMPS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(3, 3); \
        IEM_MC_ARG(uint##ValBits##_t *, puValue1, 0); \
        IEM_MC_ARG(uint##ValBits##_t,   uValue2,  1); \
        IEM_MC_ARG(uint32_t *,          pEFlags,  2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue1); \
        IEM_MC_LOCAL(RTGCPTR,           uAddr); \
        \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue1, pIemCpu->iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue2, X86_SREG_ES, uAddr); \
        IEM_MC_REF_LOCAL(puValue1, uValue1); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puValue1, uValue2, pEFlags); \
        \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END(); \
11397
11398/** Opcode 0xa6. */
11399FNIEMOP_DEF(iemOp_cmpsb_Xb_Yb)
11400{
11401 IEMOP_HLP_NO_LOCK_PREFIX();
11402
11403 /*
11404 * Use the C implementation if a repeat prefix is encountered.
11405 */
11406 if (pIemCpu->fPrefixes & IEM_OP_PRF_REPZ)
11407 {
11408 IEMOP_MNEMONIC("repe cmps Xb,Yb");
11409 switch (pIemCpu->enmEffAddrMode)
11410 {
11411 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr16, pIemCpu->iEffSeg);
11412 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr32, pIemCpu->iEffSeg);
11413 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr64, pIemCpu->iEffSeg);
11414 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11415 }
11416 }
11417 if (pIemCpu->fPrefixes & IEM_OP_PRF_REPNZ)
11418 {
11419 IEMOP_MNEMONIC("repe cmps Xb,Yb");
11420 switch (pIemCpu->enmEffAddrMode)
11421 {
11422 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr16, pIemCpu->iEffSeg);
11423 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr32, pIemCpu->iEffSeg);
11424 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr64, pIemCpu->iEffSeg);
11425 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11426 }
11427 }
11428 IEMOP_MNEMONIC("cmps Xb,Yb");
11429
11430 /*
11431 * Sharing case implementation with cmps[wdq] below.
11432 */
11433 switch (pIemCpu->enmEffAddrMode)
11434 {
11435 case IEMMODE_16BIT: IEM_CMPS_CASE(8, 16); break;
11436 case IEMMODE_32BIT: IEM_CMPS_CASE(8, 32); break;
11437 case IEMMODE_64BIT: IEM_CMPS_CASE(8, 64); break;
11438 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11439 }
11440 return VINF_SUCCESS;
11441
11442}
11443
11444
/**
 * Opcode 0xa7 - cmpsw/cmpsd/cmpsq.
 *
 * With a repe/repne prefix the whole string operation is deferred to a C
 * implementation selected by effective operand and address size;
 * otherwise a single element compare is done via the IEM_CMPS_CASE
 * worker shared with cmpsb.
 */
FNIEMOP_DEF(iemOp_cmpswd_Xv_Yv)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC("repe cmps Xv,Yv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* not reached: every address-mode case above returns */
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_4); /* 16-bit addressing cannot be encoded in 64-bit mode */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    if (pIemCpu->fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC("repne cmps Xv,Yv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* not reached: every address-mode case above returns */
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_2); /* 16-bit addressing cannot be encoded in 64-bit mode */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    IEMOP_MNEMONIC("cmps Xv,Yv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with cmpsb.
     */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_CMPS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_CMPS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;

}
11563
11564#undef IEM_CMPS_CASE
11565
/**
 * Opcode 0xa8 - test AL, imm8.
 *
 * Delegates to the common AL,Ib binary-operator helper using the
 * non-writing 'test' worker table. AF is left undefined by test.
 */
FNIEMOP_DEF(iemOp_test_AL_Ib)
{
    IEMOP_MNEMONIC("test al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_test);
}
11573
11574
/**
 * Opcode 0xa9 - test rAX, imm16/32.
 *
 * Delegates to the common rAX,Iz binary-operator helper using the
 * non-writing 'test' worker table. AF is left undefined by test.
 */
FNIEMOP_DEF(iemOp_test_eAX_Iz)
{
    IEMOP_MNEMONIC("test rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_test);
}
11582
11583
/**
 * Macro used by iemOp_stosb_Yb_AL and iemOp_stoswd_Yv_eAX.
 *
 * Stores one ValBits-bit element from rAX to ES:[rDI] and advances rDI
 * by the element size, direction depending on EFLAGS.DF.
 */
#define IEM_STOS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR,           uAddr); \
        IEM_MC_FETCH_GREG_U##ValBits(uValue, X86_GREG_xAX); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr,  X86_GREG_xDI); \
        IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END(); \
11599
/**
 * Opcode 0xaa - stosb.
 *
 * With a rep prefix the whole string operation is deferred to a C
 * implementation selected by the effective address size; otherwise a
 * single byte store is done via the IEM_STOS_CASE worker.
 */
FNIEMOP_DEF(iemOp_stosb_Yb_AL)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep stos Yb,al");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m16);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m32);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("stos Yb,al");

    /*
     * Sharing case implementation with stos[wdq] below.
     */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_STOS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_STOS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_STOS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
11633
11634
/**
 * Opcode 0xab - stosw/stosd/stosq.
 *
 * With a rep prefix the whole string operation is deferred to a C
 * implementation selected by effective operand and address size;
 * otherwise a single element store is done via the IEM_STOS_CASE
 * worker shared with stosb.
 */
FNIEMOP_DEF(iemOp_stoswd_Yv_eAX)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep stos Yv,rAX");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* not reached: every address-mode case above returns */
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_9); /* 16-bit addressing cannot be encoded in 64-bit mode */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("stos Yv,rAX");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with stosb.
     */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_STOS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_STOS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_STOS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_STOS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_STOS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_STOS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_STOS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_STOS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
11717
11718#undef IEM_STOS_CASE
11719
/**
 * Macro used by iemOp_lodsb_AL_Xb and iemOp_lodswd_eAX_Xv.
 *
 * Loads one ValBits-bit element from DS(or override):[rSI] into rAX and
 * advances rSI by the element size, direction depending on EFLAGS.DF.
 */
#define IEM_LODS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR,           uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, pIemCpu->iEffSeg, uAddr); \
        IEM_MC_STORE_GREG_U##ValBits(X86_GREG_xAX, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END();
11735
/**
 * Opcode 0xac - lodsb.
 *
 * With a rep prefix the whole string operation is deferred to a C
 * implementation selected by the effective address size; otherwise a
 * single byte load is done via the IEM_LODS_CASE worker.
 */
FNIEMOP_DEF(iemOp_lodsb_AL_Xb)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep lodsb al,Xb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m16, pIemCpu->iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m32, pIemCpu->iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m64, pIemCpu->iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("lodsb al,Xb");

    /*
     * Sharing case implementation with lods[wdq] below.
     */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_LODS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_LODS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_LODS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
11769
11770
/**
 * Opcode 0xad - lodsw/lodsd/lodsq.
 *
 * With a rep prefix the whole string operation is deferred to a C
 * implementation selected by effective operand and address size;
 * otherwise a single element load is done via the IEM_LODS_CASE
 * worker shared with lodsb.
 */
FNIEMOP_DEF(iemOp_lodswd_eAX_Xv)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep lods rAX,Xv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* not reached: every address-mode case above returns */
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_7); /* 16-bit addressing cannot be encoded in 64-bit mode */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("lods rAX,Xv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with lodsb.
     */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_LODS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_LODS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_LODS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_LODS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_LODS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
11853
11854#undef IEM_LODS_CASE
11855
/**
 * Macro used by iemOp_scasb_AL_Xb and iemOp_scaswd_eAX_Xv.
 *
 * Compares rAX with the ValBits-bit element at ES:[rDI] via the cmp
 * assembly worker (updating EFLAGS only; rAX is passed by reference but
 * cmp does not write it), then advances rDI by the element size,
 * direction depending on EFLAGS.DF as sampled after the compare.
 */
#define IEM_SCAS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(3, 2); \
        IEM_MC_ARG(uint##ValBits##_t *, puRax,   0); \
        IEM_MC_ARG(uint##ValBits##_t,   uValue,  1); \
        IEM_MC_ARG(uint32_t *,          pEFlags, 2); \
        IEM_MC_LOCAL(RTGCPTR,           uAddr); \
        \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, X86_SREG_ES, uAddr); \
        IEM_MC_REF_GREG_U##ValBits(puRax, X86_GREG_xAX); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puRax, uValue, pEFlags); \
        \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END();
11877
/**
 * Opcode 0xae - scasb.
 *
 * With a repe/repne prefix the whole string operation is deferred to a C
 * implementation selected by the effective address size; otherwise a
 * single byte scan step is done via the IEM_SCAS_CASE worker.
 */
FNIEMOP_DEF(iemOp_scasb_AL_Xb)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC("repe scasb al,Xb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m16);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m32);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC("repne scasb al,Xb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m16);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m32);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("scasb al,Xb");

    /*
     * Sharing case implementation with scas[wdq] below.
     */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_SCAS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_SCAS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_SCAS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
11922
11923
/**
 * Opcode 0xaf - scasw/scasd/scasq.
 *
 * With a repe/repne prefix the whole string operation is deferred to a C
 * implementation selected by effective operand and address size;
 * otherwise a single element scan step is done via the IEM_SCAS_CASE
 * worker shared with scasb.
 */
FNIEMOP_DEF(iemOp_scaswd_eAX_Xv)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC("repe scas rAX,Xv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* not reached: every address-mode case above returns */
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /** @todo Is this wrong? We can do 16-bit addressing in 64-bit mode, but not 32-bit, right? */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC("repne scas rAX,Xv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* not reached: every address-mode case above returns */
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_5); /* 16-bit addressing cannot be encoded in 64-bit mode */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("scas rAX,Xv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with scasb.
     */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_SCAS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_SCAS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_SCAS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_SCAS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_SCAS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_SCAS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_SCAS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_SCAS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
12039
12040#undef IEM_SCAS_CASE
12041
12042/**
12043 * Common 'mov r8, imm8' helper.
12044 */
12045FNIEMOP_DEF_1(iemOpCommonMov_r8_Ib, uint8_t, iReg)
12046{
12047 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
12048 IEMOP_HLP_NO_LOCK_PREFIX();
12049
12050 IEM_MC_BEGIN(0, 1);
12051 IEM_MC_LOCAL_CONST(uint8_t, u8Value,/*=*/ u8Imm);
12052 IEM_MC_STORE_GREG_U8(iReg, u8Value);
12053 IEM_MC_ADVANCE_RIP();
12054 IEM_MC_END();
12055
12056 return VINF_SUCCESS;
12057}
12058
12059
/** Opcode 0xb0. mov AL,Ib (or R8L,Ib with REX.B). */
FNIEMOP_DEF(iemOp_mov_AL_Ib)
{
    IEMOP_MNEMONIC("mov AL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xAX | pIemCpu->uRexB);
}
12066
12067
/** Opcode 0xb1. mov CL,Ib (or R9L,Ib with REX.B).
 * @note Function name lacks the 'mov_' prefix used by iemOp_mov_AL_Ib;
 *       kept as-is since the opcode dispatch table references it. */
FNIEMOP_DEF(iemOp_CL_Ib)
{
    IEMOP_MNEMONIC("mov CL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xCX | pIemCpu->uRexB);
}
12074
12075
/** Opcode 0xb2. mov DL,Ib (or R10L,Ib with REX.B). */
FNIEMOP_DEF(iemOp_DL_Ib)
{
    IEMOP_MNEMONIC("mov DL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDX | pIemCpu->uRexB);
}
12082
12083
/** Opcode 0xb3. mov BL,Ib (or R11L,Ib with REX.B). */
FNIEMOP_DEF(iemOp_BL_Ib)
{
    IEMOP_MNEMONIC("mov BL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBX | pIemCpu->uRexB);
}
12090
12091
/** Opcode 0xb4. mov AH,Ib.
 * @note Register index 4 (X86_GREG_xSP) is passed here; presumably the U8
 *       GREG store maps indexes 4-7 to the high-byte registers (AH..BH) when
 *       no REX prefix is active, and to SPL..DIL otherwise -
 *       NOTE(review): confirm against IEM_MC_STORE_GREG_U8. */
FNIEMOP_DEF(iemOp_mov_AH_Ib)
{
    IEMOP_MNEMONIC("mov AH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP | pIemCpu->uRexB);
}
12098
12099
/** Opcode 0xb5. mov CH,Ib (register index 5, see note on iemOp_mov_AH_Ib in
 *  this file regarding high-byte vs REX low-byte mapping). */
FNIEMOP_DEF(iemOp_CH_Ib)
{
    IEMOP_MNEMONIC("mov CH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBP | pIemCpu->uRexB);
}
12106
12107
/** Opcode 0xb6. mov DH,Ib (register index 6). */
FNIEMOP_DEF(iemOp_DH_Ib)
{
    IEMOP_MNEMONIC("mov DH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSI | pIemCpu->uRexB);
}
12114
12115
/** Opcode 0xb7. mov BH,Ib (register index 7). */
FNIEMOP_DEF(iemOp_BH_Ib)
{
    IEMOP_MNEMONIC("mov BH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDI | pIemCpu->uRexB);
}
12122
12123
12124/**
12125 * Common 'mov regX,immX' helper.
12126 */
12127FNIEMOP_DEF_1(iemOpCommonMov_Rv_Iv, uint8_t, iReg)
12128{
12129 switch (pIemCpu->enmEffOpSize)
12130 {
12131 case IEMMODE_16BIT:
12132 {
12133 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
12134 IEMOP_HLP_NO_LOCK_PREFIX();
12135
12136 IEM_MC_BEGIN(0, 1);
12137 IEM_MC_LOCAL_CONST(uint16_t, u16Value,/*=*/ u16Imm);
12138 IEM_MC_STORE_GREG_U16(iReg, u16Value);
12139 IEM_MC_ADVANCE_RIP();
12140 IEM_MC_END();
12141 break;
12142 }
12143
12144 case IEMMODE_32BIT:
12145 {
12146 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
12147 IEMOP_HLP_NO_LOCK_PREFIX();
12148
12149 IEM_MC_BEGIN(0, 1);
12150 IEM_MC_LOCAL_CONST(uint32_t, u32Value,/*=*/ u32Imm);
12151 IEM_MC_STORE_GREG_U32(iReg, u32Value);
12152 IEM_MC_ADVANCE_RIP();
12153 IEM_MC_END();
12154 break;
12155 }
12156 case IEMMODE_64BIT:
12157 {
12158 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm); /* 64-bit immediate! */
12159 IEMOP_HLP_NO_LOCK_PREFIX();
12160
12161 IEM_MC_BEGIN(0, 1);
12162 IEM_MC_LOCAL_CONST(uint64_t, u64Value,/*=*/ u64Imm);
12163 IEM_MC_STORE_GREG_U64(iReg, u64Value);
12164 IEM_MC_ADVANCE_RIP();
12165 IEM_MC_END();
12166 break;
12167 }
12168 }
12169
12170 return VINF_SUCCESS;
12171}
12172
12173
/** Opcode 0xb8. mov rAX,Iv (or r8,Iv with REX.B). */
FNIEMOP_DEF(iemOp_eAX_Iv)
{
    IEMOP_MNEMONIC("mov rAX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xAX | pIemCpu->uRexB);
}
12180
12181
/** Opcode 0xb9. mov rCX,Iv (or r9,Iv with REX.B). */
FNIEMOP_DEF(iemOp_eCX_Iv)
{
    IEMOP_MNEMONIC("mov rCX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xCX | pIemCpu->uRexB);
}
12188
12189
/** Opcode 0xba. mov rDX,Iv (or r10,Iv with REX.B). */
FNIEMOP_DEF(iemOp_eDX_Iv)
{
    IEMOP_MNEMONIC("mov rDX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDX | pIemCpu->uRexB);
}
12196
12197
/** Opcode 0xbb. mov rBX,Iv (or r11,Iv with REX.B). */
FNIEMOP_DEF(iemOp_eBX_Iv)
{
    IEMOP_MNEMONIC("mov rBX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBX | pIemCpu->uRexB);
}
12204
12205
/** Opcode 0xbc. mov rSP,Iv (or r12,Iv with REX.B). */
FNIEMOP_DEF(iemOp_eSP_Iv)
{
    IEMOP_MNEMONIC("mov rSP,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSP | pIemCpu->uRexB);
}
12212
12213
/** Opcode 0xbd. mov rBP,Iv (or r13,Iv with REX.B). */
FNIEMOP_DEF(iemOp_eBP_Iv)
{
    IEMOP_MNEMONIC("mov rBP,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBP | pIemCpu->uRexB);
}
12220
12221
/** Opcode 0xbe. mov rSI,Iv (or r14,Iv with REX.B). */
FNIEMOP_DEF(iemOp_eSI_Iv)
{
    IEMOP_MNEMONIC("mov rSI,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSI | pIemCpu->uRexB);
}
12228
12229
/** Opcode 0xbf. mov rDI,Iv (or r15,Iv with REX.B). */
FNIEMOP_DEF(iemOp_eDI_Iv)
{
    IEMOP_MNEMONIC("mov rDI,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDI | pIemCpu->uRexB);
}
12236
12237
/** Opcode 0xc0. Group 2: shift/rotate Eb by imm8 count.
 *
 * The ModR/M reg field selects the operation (rol/ror/rcl/rcr/shl/shr/sar);
 * /6 is an invalid encoding. Handles both register and memory destinations.
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_Ib)
{
    IEMOP_HLP_MIN_186(); /* 0xc0 first appeared on the 80186. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Eb,Ib"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Eb,Ib"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Eb,Ib"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Eb,Ib"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Eb,Ib"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Eb,Ib"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Eb,Ib"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE(); /* /6 is not assigned. */
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    /* OF and AF are architecturally undefined after shifts/rotates. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEMOP_HLP_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,       pu8Dst,            0);
        IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1);
        IEM_MC_ARG(uint32_t *,      pEFlags,           2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,   pu8Dst,          0);
        IEM_MC_ARG(uint8_t,     cShiftArg,       1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        /* Effective address first (1 remaining opcode byte: the imm8 count),
           then fetch that count before mapping the destination. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEM_MC_ASSIGN(cShiftArg, cShift);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
12297
12298
/** Opcode 0xc1. Group 2: shift/rotate Ev by imm8 count.
 *
 * Same operation selection via ModR/M reg field as 0xc0 (/6 invalid), but
 * operating on 16/32/64-bit destinations per the effective operand size.
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_Ib)
{
    IEMOP_HLP_MIN_186(); /* 0xc1 first appeared on the 80186. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Ev,Ib"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Ev,Ib"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Ev,Ib"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Ev,Ib"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Ev,Ib"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Ev,Ib"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Ev,Ib"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE(); /* /6 is not assigned. */
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    /* OF and AF are architecturally undefined after shifts/rotates. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEMOP_HLP_NO_LOCK_PREFIX();
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit GPR writes zero the upper half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,  pu16Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* Effective address first (1 remaining opcode byte: imm8). */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,  pu32Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,  pu64Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12436
12437
/** Opcode 0xc2. retn Iw - near return, popping Iw extra bytes off the stack. */
FNIEMOP_DEF(iemOp_retn_Iw)
{
    IEMOP_MNEMONIC("retn Iw");
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* near ret defaults to 64-bit operand size in long mode. */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pIemCpu->enmEffOpSize, u16Imm);
}
12447
12448
/** Opcode 0xc3. retn - near return (no extra stack adjustment). */
FNIEMOP_DEF(iemOp_retn)
{
    IEMOP_MNEMONIC("retn");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* near ret defaults to 64-bit operand size in long mode. */
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pIemCpu->enmEffOpSize, 0);
}
12457
12458
/** Opcode 0xc4. les Gv,Mp in legacy modes; 2-byte VEX prefix otherwise. */
FNIEMOP_DEF(iemOp_les_Gv_Mp_vex2)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (   pIemCpu->enmCpuMode == IEMMODE_64BIT
        || (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC("2-byte-vex");
        /* The LES instruction is invalid in 64-bit mode. In legacy and
           compatibility mode it is invalid with MOD=3.
           The use as a VEX prefix is made possible by assigning the inverted
           REX.R to the top MOD bit, and the top bit in the inverted register
           specifier to the bottom MOD bit, thereby effectively limiting 32-bit
           to accessing registers 0..7 in this VEX form. */
        /** @todo VEX: Just use new tables for it. */
        return IEMOP_RAISE_INVALID_OPCODE(); /* VEX decoding not implemented yet. */
    }
    IEMOP_MNEMONIC("les Gv,Mp");
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_ES, bRm);
}
12479
12480
/** Opcode 0xc5. lds Gv,Mp in legacy modes; 3-byte VEX prefix otherwise. */
FNIEMOP_DEF(iemOp_lds_Gv_Mp_vex3)
{
    /* The LDS instruction is invalid in 64-bit mode. In legacy and
       compatibility mode it is invalid with MOD=3.
       The use as a VEX prefix is made possible by assigning the inverted
       REX.R and REX.X to the two MOD bits, since the REX bits are ignored
       outside of 64-bit mode. VEX is not available in real or v86 mode. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pIemCpu->enmCpuMode != IEMMODE_64BIT)
    {
        if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        {
            IEMOP_MNEMONIC("lds Gv,Mp");
            return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_DS, bRm);
        }
        IEMOP_HLP_NO_REAL_OR_V86_MODE(); /* VEX encoding requires protected mode. */
    }

    IEMOP_MNEMONIC("3-byte-vex");
    /** @todo Test when exactly the VEX conformance checks kick in during
     *        instruction decoding and fetching (using \#PF). */
    /* Consume the two VEX payload bytes and the actual opcode byte; they are
       currently decoded but not acted upon. */
    uint8_t bVex1;   IEM_OPCODE_GET_NEXT_U8(&bVex1);
    uint8_t bVex2;   IEM_OPCODE_GET_NEXT_U8(&bVex2);
    uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
#if 0 /* will make sense of this next week... */
    if (   !(pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REX))
        &&
        )
    {

    }
#endif

    /** @todo VEX: Just use new tables for it. */
    return IEMOP_RAISE_INVALID_OPCODE(); /* VEX decoding not implemented yet. */
}
12518
12519
/** Opcode 0xc6. Group 11: mov Eb,Ib (the only assigned /r encoding, /0). */
FNIEMOP_DEF(iemOp_Grp11_Eb_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
        return IEMOP_RAISE_INVALID_OPCODE();
    IEMOP_MNEMONIC("mov Eb,Ib");

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_BEGIN(0, 0);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u8Imm);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        /* Effective address first (1 remaining opcode byte: the imm8). */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_STORE_MEM_U8(pIemCpu->iEffSeg, GCPtrEffDst, u8Imm);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
12551
12552
/** Opcode 0xc7. Group 11: mov Ev,Iz (the only assigned /r encoding, /0).
 *  In 64-bit operand size the immediate is a sign-extended imm32. */
FNIEMOP_DEF(iemOp_Grp11_Ev_Iz)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Ev,Iz in this group. */
        return IEMOP_RAISE_INVALID_OPCODE();
    IEMOP_MNEMONIC("mov Ev,Iz");

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 0);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 0);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 0);
                /* imm32 sign-extended to 64 bits, per the Iz encoding. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                /* Effective address first; 2 remaining opcode bytes (imm16). */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                /* 4 remaining opcode bytes (imm32). */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrEffDst, u32Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                /* Still 4 remaining opcode bytes: Iz is imm32 even for 64-bit. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffDst, u64Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12633
12634
12635
12636
/** Opcode 0xc8. enter Iw,Ib - create a stack frame of Iw bytes with nesting
 *  level Ib. */
FNIEMOP_DEF(iemOp_enter_Iw_Ib)
{
    IEMOP_MNEMONIC("enter Iw,Ib");
    IEMOP_HLP_MIN_186(); /* ENTER first appeared on the 80186. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEMOP_HLP_NO_LOCK_PREFIX();
    uint16_t cbFrame;        IEM_OPCODE_GET_NEXT_U16(&cbFrame);
    uint8_t  u8NestingLevel; IEM_OPCODE_GET_NEXT_U8(&u8NestingLevel);
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_enter, pIemCpu->enmEffOpSize, cbFrame, u8NestingLevel);
}
12648
12649
12650/** Opcode 0xc9. */
12651FNIEMOP_DEF(iemOp_leave)
12652{
12653 IEMOP_MNEMONIC("retn");
12654 IEMOP_HLP_MIN_186();
12655 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
12656 IEMOP_HLP_NO_LOCK_PREFIX();
12657 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_leave, pIemCpu->enmEffOpSize);
12658}
12659
12660
/** Opcode 0xca. retf Iw - far return, popping Iw extra bytes off the stack. */
FNIEMOP_DEF(iemOp_retf_Iw)
{
    IEMOP_MNEMONIC("retf Iw");
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pIemCpu->enmEffOpSize, u16Imm);
}
12670
12671
/** Opcode 0xcb. retf - far return (no extra stack adjustment). */
FNIEMOP_DEF(iemOp_retf)
{
    IEMOP_MNEMONIC("retf");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pIemCpu->enmEffOpSize, 0);
}
12680
12681
/** Opcode 0xcc. int3 - software breakpoint; raises \#BP with the
 *  is-INT3-instruction flag set (distinguishes it from 'int 3' / 0xcd 0x03). */
FNIEMOP_DEF(iemOp_int_3)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_BP, true /*fIsBpInstr*/);
}
12688
12689
/** Opcode 0xcd. int Ib - software interrupt to the vector given by imm8. */
FNIEMOP_DEF(iemOp_int_Ib)
{
    uint8_t u8Int; IEM_OPCODE_GET_NEXT_U8(&u8Int);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, u8Int, false /*fIsBpInstr*/);
}
12697
12698
/** Opcode 0xce. into - raise \#OF via the interrupt path; the conditional
 *  (only when EFLAGS.OF is set) is presumably handled inside iemCImpl_int -
 *  NOTE(review): confirm. Invalid in 64-bit mode. */
FNIEMOP_DEF(iemOp_into)
{
    IEMOP_MNEMONIC("into");
    IEMOP_HLP_NO_64BIT(); /* 0xce is an invalid opcode in long mode. */

    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG_CONST(uint8_t,   u8Int,      /*=*/ X86_XCPT_OF, 0);
    IEM_MC_ARG_CONST(bool,      fIsBpInstr, /*=*/ false,       1);
    IEM_MC_CALL_CIMPL_2(iemCImpl_int, u8Int, fIsBpInstr);
    IEM_MC_END();
    return VINF_SUCCESS;
}
12712
12713
/** Opcode 0xcf. iret - interrupt return, operand-size sensitive. */
FNIEMOP_DEF(iemOp_iret)
{
    IEMOP_MNEMONIC("iret");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_iret, pIemCpu->enmEffOpSize);
}
12721
12722
/** Opcode 0xd0. Group 2: shift/rotate Eb by an implicit count of 1.
 *
 * Operation selected by the ModR/M reg field (/6 invalid), same table as
 * 0xc0/0xc2, with the count hard-wired to 1 instead of an imm8.
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Eb,1"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Eb,1"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Eb,1"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Eb,1"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Eb,1"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Eb,1"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Eb,1"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE(); /* /6 is not assigned. */
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
    /* OF and AF are architecturally undefined after shifts/rotates. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
        IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=*/1,   1);
        IEM_MC_ARG(uint32_t *,      pEFlags,            2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
        IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=*/1,   1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,        2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        /* No trailing opcode bytes here, hence cbImm = 0. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
12778
12779
12780
/** Opcode 0xd1. Group 2: shift/rotate Ev by an implicit count of 1.
 *
 * Same operation table as 0xd0 (/6 invalid), operating on 16/32/64-bit
 * destinations per the effective operand size.
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Ev,1"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Ev,1"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Ev,1"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Ev,1"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Ev,1"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Ev,1"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Ev,1"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE(); /* /6 is not assigned. */
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
    /* OF and AF are architecturally undefined after shifts/rotates. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_NO_LOCK_PREFIX();
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit GPR writes zero the upper half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,      pu16Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,        2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* No trailing opcode bytes, hence cbImm = 0. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,      pu32Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,        2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,      pu64Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,        2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12910
12911
/** Opcode 0xd2. Group 2: shift/rotate Eb by the count in CL.
 *
 * Same operation table as 0xd0 (/6 invalid); the shift count is fetched
 * from the CL register at execution time.
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Eb,CL"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Eb,CL"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Eb,CL"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Eb,CL"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Eb,CL"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Eb,CL"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Eb,CL"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE(); /* /6 is not assigned. */
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc, grr. */
    }
    /* OF and AF are architecturally undefined after shifts/rotates. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,   pu8Dst,     0);
        IEM_MC_ARG(uint8_t,     cShiftArg,  1);
        IEM_MC_ARG(uint32_t *,  pEFlags,    2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* count comes from CL. */
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,   pu8Dst,          0);
        IEM_MC_ARG(uint8_t,     cShiftArg,       1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        /* No trailing opcode bytes, hence cbImm = 0. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* count comes from CL. */
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
12969
12970
/** Opcode 0xd3 - Group 2 shift/rotate, word/dword/qword operand, count in CL. */
FNIEMOP_DEF(iemOp_Grp2_Ev_CL)
{
    /* The ModR/M reg field selects the operation; /6 is not assigned. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Ev,CL"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Ev,CL"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Ev,CL"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Ev,CL"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Ev,CL"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Ev,CL"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Ev,CL"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* silences gcc's maybe-uninitialized warning for pImpl */
    }
    /* OF and AF are left in an undefined state by these instructions. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register destination - one case per effective operand size. */
        IEMOP_HLP_NO_LOCK_PREFIX();
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,    0);
                IEM_MC_ARG(uint8_t,         cShiftArg,  1);
                IEM_MC_ARG(uint32_t *,      pEFlags,    2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,    0);
                IEM_MC_ARG(uint8_t,         cShiftArg,  1);
                IEM_MC_ARG(uint32_t *,      pEFlags,    2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit register writes zero the upper half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,    0);
                IEM_MC_ARG(uint8_t,         cShiftArg,  1);
                IEM_MC_ARG(uint32_t *,      pEFlags,    2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory destination - map read-write, operate in place, commit. */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,  pu16Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,  pu32Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,  pu64Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
13106
/** Opcode 0xd4 - AAM Ib (ASCII adjust AX after multiply). Invalid in 64-bit
 *  mode; an immediate divisor of zero raises \#DE before any state change. */
FNIEMOP_DEF(iemOp_aam_Ib)
{
    IEMOP_MNEMONIC("aam Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    if (!bImm)
        return IEMOP_RAISE_DIVIDE_ERROR();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aam, bImm);
}
13118
13119
/** Opcode 0xd5 - AAD Ib (ASCII adjust AX before division). Invalid in
 *  64-bit mode; unlike AAM, a zero immediate does not fault. */
FNIEMOP_DEF(iemOp_aad_Ib)
{
    IEMOP_MNEMONIC("aad Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aad, bImm);
}
13129
13130
13131/** Opcode 0xd6. */
13132FNIEMOP_DEF(iemOp_salc)
13133{
13134 IEMOP_MNEMONIC("salc");
13135 IEMOP_HLP_MIN_286(); /* (undocument at the time) */
13136 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
13137 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13138 IEMOP_HLP_NO_64BIT();
13139
13140 IEM_MC_BEGIN(0, 0);
13141 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
13142 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0xff);
13143 } IEM_MC_ELSE() {
13144 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0x00);
13145 } IEM_MC_ENDIF();
13146 IEM_MC_ADVANCE_RIP();
13147 IEM_MC_END();
13148 return VINF_SUCCESS;
13149}
13150
13151
/** Opcode 0xd7 - XLAT: AL = [seg:(e/r)BX + zero-extended AL].
 *  One case per effective address size; the address register width follows
 *  the address-size attribute, not the operand size. */
FNIEMOP_DEF(iemOp_xlat)
{
    IEMOP_MNEMONIC("xlat");
    IEMOP_HLP_NO_LOCK_PREFIX();
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint16_t, u16Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U16(u16Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U16_TO_LOCAL(u16Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM16_U8(u8Tmp, pIemCpu->iEffSeg, u16Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint32_t, u32Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U32(u32Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U32_TO_LOCAL(u32Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM32_U8(u8Tmp, pIemCpu->iEffSeg, u32Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint64_t, u64Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U64(u64Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U64_TO_LOCAL(u64Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM_U8(u8Tmp, pIemCpu->iEffSeg, u64Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
13198
13199
13200/**
13201 * Common worker for FPU instructions working on ST0 and STn, and storing the
13202 * result in ST0.
13203 *
13204 * @param pfnAImpl Pointer to the instruction implementation (assembly).
13205 */
13206FNIEMOP_DEF_2(iemOpHlpFpu_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
13207{
13208 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13209
13210 IEM_MC_BEGIN(3, 1);
13211 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
13212 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
13213 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
13214 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
13215
13216 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13217 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13218 IEM_MC_PREPARE_FPU_USAGE();
13219 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
13220 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
13221 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
13222 IEM_MC_ELSE()
13223 IEM_MC_FPU_STACK_UNDERFLOW(0);
13224 IEM_MC_ENDIF();
13225 IEM_MC_ADVANCE_RIP();
13226
13227 IEM_MC_END();
13228 return VINF_SUCCESS;
13229}
13230
13231
13232/**
13233 * Common worker for FPU instructions working on ST0 and STn, and only affecting
13234 * flags.
13235 *
13236 * @param pfnAImpl Pointer to the instruction implementation (assembly).
13237 */
13238FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
13239{
13240 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13241
13242 IEM_MC_BEGIN(3, 1);
13243 IEM_MC_LOCAL(uint16_t, u16Fsw);
13244 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
13245 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
13246 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
13247
13248 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13249 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13250 IEM_MC_PREPARE_FPU_USAGE();
13251 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
13252 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
13253 IEM_MC_UPDATE_FSW(u16Fsw);
13254 IEM_MC_ELSE()
13255 IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
13256 IEM_MC_ENDIF();
13257 IEM_MC_ADVANCE_RIP();
13258
13259 IEM_MC_END();
13260 return VINF_SUCCESS;
13261}
13262
13263
13264/**
13265 * Common worker for FPU instructions working on ST0 and STn, only affecting
13266 * flags, and popping when done.
13267 *
13268 * @param pfnAImpl Pointer to the instruction implementation (assembly).
13269 */
13270FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
13271{
13272 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13273
13274 IEM_MC_BEGIN(3, 1);
13275 IEM_MC_LOCAL(uint16_t, u16Fsw);
13276 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
13277 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
13278 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
13279
13280 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13281 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13282 IEM_MC_PREPARE_FPU_USAGE();
13283 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
13284 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
13285 IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
13286 IEM_MC_ELSE()
13287 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(UINT8_MAX);
13288 IEM_MC_ENDIF();
13289 IEM_MC_ADVANCE_RIP();
13290
13291 IEM_MC_END();
13292 return VINF_SUCCESS;
13293}
13294
13295
/** Opcode 0xd8 11/0 - FADD ST(0),ST(i): ST(0) += ST(i). */
FNIEMOP_DEF_1(iemOp_fadd_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fadd st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fadd_r80_by_r80);
}
13302
13303
/** Opcode 0xd8 11/1 - FMUL ST(0),ST(i): ST(0) *= ST(i). */
FNIEMOP_DEF_1(iemOp_fmul_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fmul st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fmul_r80_by_r80);
}
13310
13311
/** Opcode 0xd8 11/2 - FCOM ST(0),ST(i): compare, update FSW condition codes. */
FNIEMOP_DEF_1(iemOp_fcom_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fcom_r80_by_r80);
}
13318
13319
/** Opcode 0xd8 11/3 - FCOMP ST(0),ST(i): compare like FCOM, then pop ST(0). */
FNIEMOP_DEF_1(iemOp_fcomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fcom_r80_by_r80);
}
13326
13327
/** Opcode 0xd8 11/4 - FSUB ST(0),ST(i): ST(0) -= ST(i). */
FNIEMOP_DEF_1(iemOp_fsub_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsub st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsub_r80_by_r80);
}
13334
13335
/** Opcode 0xd8 11/5 - FSUBR ST(0),ST(i): ST(0) = ST(i) - ST(0). */
FNIEMOP_DEF_1(iemOp_fsubr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsubr_r80_by_r80);
}
13342
13343
/** Opcode 0xd8 11/6 - FDIV ST(0),ST(i): ST(0) /= ST(i). */
FNIEMOP_DEF_1(iemOp_fdiv_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdiv st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdiv_r80_by_r80);
}
13350
13351
/** Opcode 0xd8 11/7 - FDIVR ST(0),ST(i): ST(0) = ST(i) / ST(0). */
FNIEMOP_DEF_1(iemOp_fdivr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdivr_r80_by_r80);
}
13358
13359
13360/**
13361 * Common worker for FPU instructions working on ST0 and an m32r, and storing
13362 * the result in ST0.
13363 *
13364 * @param pfnAImpl Pointer to the instruction implementation (assembly).
13365 */
13366FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32r, uint8_t, bRm, PFNIEMAIMPLFPUR32, pfnAImpl)
13367{
13368 IEM_MC_BEGIN(3, 3);
13369 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13370 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
13371 IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
13372 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
13373 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
13374 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);
13375
13376 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13377 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13378
13379 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13380 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13381 IEM_MC_FETCH_MEM_R32(r32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);
13382
13383 IEM_MC_PREPARE_FPU_USAGE();
13384 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
13385 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr32Val2);
13386 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
13387 IEM_MC_ELSE()
13388 IEM_MC_FPU_STACK_UNDERFLOW(0);
13389 IEM_MC_ENDIF();
13390 IEM_MC_ADVANCE_RIP();
13391
13392 IEM_MC_END();
13393 return VINF_SUCCESS;
13394}
13395
13396
/** Opcode 0xd8 !11/0 - FADD ST(0),m32real: ST(0) += [mem32]. */
FNIEMOP_DEF_1(iemOp_fadd_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fadd st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fadd_r80_by_r32);
}
13403
13404
/** Opcode 0xd8 !11/1 - FMUL ST(0),m32real: ST(0) *= [mem32]. */
FNIEMOP_DEF_1(iemOp_fmul_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fmul st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fmul_r80_by_r32);
}
13411
13412
/** Opcode 0xd8 !11/2 - FCOM ST(0),m32real: compare ST(0) against a 32-bit
 *  real from memory; only FSW is updated, no register store. */
FNIEMOP_DEF_1(iemOp_fcom_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcom st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U,            r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,        u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,                 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U,  pr32Val2,       r32Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13445
13446
/** Opcode 0xd8 !11/3 - FCOMP ST(0),m32real: like FCOM m32real but pops
 *  ST(0) afterwards (also on the underflow path). */
FNIEMOP_DEF_1(iemOp_fcomp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcomp st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U,            r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,        u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,                 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U,  pr32Val2,       r32Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13479
13480
/** Opcode 0xd8 !11/4 - FSUB ST(0),m32real: ST(0) -= [mem32]. */
FNIEMOP_DEF_1(iemOp_fsub_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsub st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsub_r80_by_r32);
}
13487
13488
/** Opcode 0xd8 !11/5 - FSUBR ST(0),m32real: ST(0) = [mem32] - ST(0). */
FNIEMOP_DEF_1(iemOp_fsubr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsubr_r80_by_r32);
}
13495
13496
/** Opcode 0xd8 !11/6 - FDIV ST(0),m32real: ST(0) /= [mem32]. */
FNIEMOP_DEF_1(iemOp_fdiv_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdiv st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdiv_r80_by_r32);
}
13503
13504
/** Opcode 0xd8 !11/7 - FDIVR ST(0),m32real: ST(0) = [mem32] / ST(0). */
FNIEMOP_DEF_1(iemOp_fdivr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdivr_r80_by_r32);
}
13511
13512
/** Opcode 0xd8 - first x87 escape byte.  Dispatches on mod (register vs
 *  memory form) and the reg field of the ModR/M byte.  Records the FPU
 *  opcode offset for FOP before fetching ModR/M. */
FNIEMOP_DEF(iemOp_EscF0)
{
    /* Remember where the escape opcode starts; needed for the FOP register. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register forms: operand is ST(rm). */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_stN,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_stN,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_stN, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory forms: operand is a 32-bit real. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m32r,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m32r,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m32r,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m32r,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m32r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m32r,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m32r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
13550
13551
/** Opcode 0xd9 /0 mem32real - FLD m32real: convert a 32-bit real from memory
 *  to 80-bit and push it.  ST(7) must be free or the push overflows.
 * @sa iemOp_fld_m64r */
FNIEMOP_DEF_1(iemOp_fld_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fld m32r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U,            r32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U,  pr32Val,    r32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r32_to_r80, pFpuRes, pr32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13584
13585
/** Opcode 0xd9 !11/2 mem32real - FST m32real: store ST(0) to memory as a
 *  32-bit real.  If ST(0) is empty and IM is masked, a negative QNaN is
 *  written instead. */
FNIEMOP_DEF_1(iemOp_fst_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fst m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U,             pr32Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST(0): write a QNaN only if invalid-op exceptions are masked. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13620
13621
/** Opcode 0xd9 !11/3 - FSTP m32real: like FST m32real but pops ST(0)
 *  afterwards (also on the underflow path). */
FNIEMOP_DEF_1(iemOp_fstp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fstp m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U,             pr32Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST(0): write a QNaN only if invalid-op exceptions are masked. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13656
13657
/** Opcode 0xd9 !11/4 - FLDENV m14/28byte: load the FPU environment from
 *  memory; layout depends on the effective operand size, so that is passed
 *  down to the C implementation. */
FNIEMOP_DEF_1(iemOp_fldenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fldenv m14/28byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE,           enmEffOpSize, /*=*/ pIemCpu->enmEffOpSize,  0);
    IEM_MC_ARG(uint8_t,                 iEffSeg,                                    1);
    IEM_MC_ARG(RTGCPTR,                 GCPtrEffSrc,                                2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fldenv, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
13675
13676
13677/** Opcode 0xd9 !11/5 */
13678FNIEMOP_DEF_1(iemOp_fldcw, uint8_t, bRm)
13679{
13680 IEMOP_MNEMONIC("fldcw m2byte");
13681 IEM_MC_BEGIN(1, 1);
13682 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13683 IEM_MC_ARG(uint16_t, u16Fsw, 0);
13684 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13685 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13686 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13687 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
13688 IEM_MC_FETCH_MEM_U16(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
13689 IEM_MC_CALL_CIMPL_1(iemCImpl_fldcw, u16Fsw);
13690 IEM_MC_END();
13691 return VINF_SUCCESS;
13692}
13693
13694
/** Opcode 0xd9 !11/6 - FNSTENV m14/m28byte: store the FPU environment to
 *  memory without checking for pending exceptions first; layout depends on
 *  the effective operand size. */
FNIEMOP_DEF_1(iemOp_fnstenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fstenv m14/m28byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE,           enmEffOpSize, /*=*/ pIemCpu->enmEffOpSize,  0);
    IEM_MC_ARG(uint8_t,                 iEffSeg,                                    1);
    IEM_MC_ARG(RTGCPTR,                 GCPtrEffDst,                                2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fnstenv, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}
13712
13713
/** Opcode 0xd9 !11/7 - FNSTCW m2byte: store the FPU control word to memory
 *  without checking for pending exceptions. */
FNIEMOP_DEF_1(iemOp_fnstcw, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fnstcw m2byte");
    IEM_MC_BEGIN(2, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fcw);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FCW(u16Fcw);
    IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Fcw);
    IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
    return VINF_SUCCESS;
}
13731
13732
/** Opcode 0xd9 0xd0, 0xd9 0xd8-0xdf, ++? - FNOP: no FPU operation, but still
 *  raises \#NM/\#MF and updates the FPU opcode/IP registers. */
FNIEMOP_DEF(iemOp_fnop)
{
    IEMOP_MNEMONIC("fnop");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /** @todo Testcase: looks like FNOP leaves FOP alone but updates FPUIP. Could be
     *        intel optimizations. Investigate. */
    IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
    return VINF_SUCCESS;
}
13750
13751
/** Opcode 0xd9 11/0 stN - FLD ST(i): push a copy of ST(i) onto the stack. */
FNIEMOP_DEF_1(iemOp_fld_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fld stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF?  Intel mentioned it not.  AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, bRm & X86_MODRM_RM_MASK)
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_PUSH_FPU_RESULT(FpuRes);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW();
    IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
13779
13780
/** Opcode 0xd9 11/3 stN - FXCH ST(i): exchange ST(0) and ST(i).  Underflow
 *  (either register empty) is delegated to a C implementation. */
FNIEMOP_DEF_1(iemOp_fxch_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fxch stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF?  Intel mentioned it not.  AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(1, 3);
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value1);
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value2);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_ARG_CONST(uint8_t,           iStReg, /*=*/ bRm & X86_MODRM_RM_MASK, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        /* Swap: old ST0 goes to ST(i), old ST(i) becomes the new ST0 result (C1 set). */
        IEM_MC_SET_FPU_RESULT(FpuRes, X86_FSW_C1, pr80Value2);
        IEM_MC_STORE_FPUREG_R80_SRC_REF(bRm & X86_MODRM_RM_MASK, pr80Value1);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_CALL_CIMPL_1(iemCImpl_fxch_underflow, iStReg);
    IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
13811
13812
/** Opcode 0xd9 11/4, 0xdd 11/2 - FSTP ST(i): copy ST(0) to ST(i), then pop. */
FNIEMOP_DEF_1(iemOp_fstp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fstp st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* fstp st0, st0 is frequently used as an unofficial 'ffreep st0' sequence,
       so the destination-is-ST0 case just pops without copying. */
    uint8_t const iDstReg = bRm & X86_MODRM_RM_MASK;
    if (!iDstReg)
    {
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL_CONST(uint16_t,        u16Fsw, /*=*/ 0);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY(0)
            IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
        IEM_MC_ELSE()
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(0);
        IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value);
        IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
            IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
            IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, iDstReg);
        IEM_MC_ELSE()
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(iDstReg);
        IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
13859
13860
13861/**
13862 * Common worker for FPU instructions working on ST0 and replaces it with the
13863 * result, i.e. unary operators.
13864 *
13865 * @param pfnAImpl Pointer to the instruction implementation (assembly).
13866 */
13867FNIEMOP_DEF_1(iemOpHlpFpu_st0, PFNIEMAIMPLFPUR80UNARY, pfnAImpl)
13868{
13869 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13870
13871 IEM_MC_BEGIN(2, 1);
13872 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
13873 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
13874 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
13875
13876 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13877 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13878 IEM_MC_PREPARE_FPU_USAGE();
13879 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
13880 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuRes, pr80Value);
13881 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
13882 IEM_MC_ELSE()
13883 IEM_MC_FPU_STACK_UNDERFLOW(0);
13884 IEM_MC_ENDIF();
13885 IEM_MC_ADVANCE_RIP();
13886
13887 IEM_MC_END();
13888 return VINF_SUCCESS;
13889}
13890
13891
/** Opcode 0xd9 0xe0. FCHS: change the sign of ST0. */
FNIEMOP_DEF(iemOp_fchs)
{
    IEMOP_MNEMONIC("fchs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fchs_r80);
}
13898
13899
/** Opcode 0xd9 0xe1. FABS: replace ST0 with its absolute value. */
FNIEMOP_DEF(iemOp_fabs)
{
    IEMOP_MNEMONIC("fabs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fabs_r80);
}
13906
13907
13908/**
13909 * Common worker for FPU instructions working on ST0 and only returns FSW.
13910 *
13911 * @param pfnAImpl Pointer to the instruction implementation (assembly).
13912 */
13913FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0, PFNIEMAIMPLFPUR80UNARYFSW, pfnAImpl)
13914{
13915 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13916
13917 IEM_MC_BEGIN(2, 1);
13918 IEM_MC_LOCAL(uint16_t, u16Fsw);
13919 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
13920 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
13921
13922 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13923 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13924 IEM_MC_PREPARE_FPU_USAGE();
13925 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
13926 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pu16Fsw, pr80Value);
13927 IEM_MC_UPDATE_FSW(u16Fsw);
13928 IEM_MC_ELSE()
13929 IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
13930 IEM_MC_ENDIF();
13931 IEM_MC_ADVANCE_RIP();
13932
13933 IEM_MC_END();
13934 return VINF_SUCCESS;
13935}
13936
13937
/** Opcode 0xd9 0xe4. FTST: compare ST0 against 0.0, setting only FSW flags. */
FNIEMOP_DEF(iemOp_ftst)
{
    IEMOP_MNEMONIC("ftst st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_ftst_r80);
}
13944
13945
/** Opcode 0xd9 0xe5. FXAM: classify ST0 via the FSW condition codes. */
FNIEMOP_DEF(iemOp_fxam)
{
    IEMOP_MNEMONIC("fxam st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_fxam_r80);
}
13952
13953
13954/**
13955 * Common worker for FPU instructions pushing a constant onto the FPU stack.
13956 *
13957 * @param pfnAImpl Pointer to the instruction implementation (assembly).
13958 */
13959FNIEMOP_DEF_1(iemOpHlpFpuPushConstant, PFNIEMAIMPLFPUR80LDCONST, pfnAImpl)
13960{
13961 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13962
13963 IEM_MC_BEGIN(1, 1);
13964 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
13965 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
13966
13967 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13968 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13969 IEM_MC_PREPARE_FPU_USAGE();
13970 IEM_MC_IF_FPUREG_IS_EMPTY(7)
13971 IEM_MC_CALL_FPU_AIMPL_1(pfnAImpl, pFpuRes);
13972 IEM_MC_PUSH_FPU_RESULT(FpuRes);
13973 IEM_MC_ELSE()
13974 IEM_MC_FPU_STACK_PUSH_OVERFLOW();
13975 IEM_MC_ENDIF();
13976 IEM_MC_ADVANCE_RIP();
13977
13978 IEM_MC_END();
13979 return VINF_SUCCESS;
13980}
13981
13982
/** Opcode 0xd9 0xe8. FLD1: push the constant +1.0. */
FNIEMOP_DEF(iemOp_fld1)
{
    IEMOP_MNEMONIC("fld1");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fld1);
}
13989
13990
/** Opcode 0xd9 0xe9. FLDL2T: push the constant log2(10). */
FNIEMOP_DEF(iemOp_fldl2t)
{
    IEMOP_MNEMONIC("fldl2t");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2t);
}
13997
13998
/** Opcode 0xd9 0xea. FLDL2E: push the constant log2(e). */
FNIEMOP_DEF(iemOp_fldl2e)
{
    IEMOP_MNEMONIC("fldl2e");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2e);
}
14005
/** Opcode 0xd9 0xeb. FLDPI: push the constant pi. */
FNIEMOP_DEF(iemOp_fldpi)
{
    IEMOP_MNEMONIC("fldpi");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldpi);
}
14012
14013
/** Opcode 0xd9 0xec. FLDLG2: push the constant log10(2). */
FNIEMOP_DEF(iemOp_fldlg2)
{
    IEMOP_MNEMONIC("fldlg2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldlg2);
}
14020
/** Opcode 0xd9 0xed. FLDLN2: push the constant ln(2). */
FNIEMOP_DEF(iemOp_fldln2)
{
    IEMOP_MNEMONIC("fldln2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldln2);
}
14027
14028
/** Opcode 0xd9 0xee. FLDZ: push the constant +0.0. */
FNIEMOP_DEF(iemOp_fldz)
{
    IEMOP_MNEMONIC("fldz");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldz);
}
14035
14036
/** Opcode 0xd9 0xf0. F2XM1: replace ST0 with 2^ST0 - 1. */
FNIEMOP_DEF(iemOp_f2xm1)
{
    IEMOP_MNEMONIC("f2xm1 st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_f2xm1_r80);
}
14043
14044
14045/** Opcode 0xd9 0xf1. */
14046FNIEMOP_DEF(iemOp_fylx2)
14047{
14048 IEMOP_MNEMONIC("fylx2 st0");
14049 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fyl2x_r80);
14050}
14051
14052
14053/**
14054 * Common worker for FPU instructions working on ST0 and having two outputs, one
14055 * replacing ST0 and one pushed onto the stack.
14056 *
14057 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14058 */
14059FNIEMOP_DEF_1(iemOpHlpFpuReplace_st0_push, PFNIEMAIMPLFPUR80UNARYTWO, pfnAImpl)
14060{
14061 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14062
14063 IEM_MC_BEGIN(2, 1);
14064 IEM_MC_LOCAL(IEMFPURESULTTWO, FpuResTwo);
14065 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULTTWO, pFpuResTwo, FpuResTwo, 0);
14066 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
14067
14068 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14069 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14070 IEM_MC_PREPARE_FPU_USAGE();
14071 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
14072 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuResTwo, pr80Value);
14073 IEM_MC_PUSH_FPU_RESULT_TWO(FpuResTwo);
14074 IEM_MC_ELSE()
14075 IEM_MC_FPU_STACK_PUSH_UNDERFLOW_TWO();
14076 IEM_MC_ENDIF();
14077 IEM_MC_ADVANCE_RIP();
14078
14079 IEM_MC_END();
14080 return VINF_SUCCESS;
14081}
14082
14083
/** Opcode 0xd9 0xf2. FPTAN: replace ST0 and push a second result. */
FNIEMOP_DEF(iemOp_fptan)
{
    IEMOP_MNEMONIC("fptan st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fptan_r80_r80);
}
14090
14091
14092/**
14093 * Common worker for FPU instructions working on STn and ST0, storing the result
14094 * in STn, and popping the stack unless IE, DE or ZE was raised.
14095 *
14096 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14097 */
14098FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
14099{
14100 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14101
14102 IEM_MC_BEGIN(3, 1);
14103 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14104 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
14105 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14106 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
14107
14108 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14109 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14110
14111 IEM_MC_PREPARE_FPU_USAGE();
14112 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
14113 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
14114 IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, bRm & X86_MODRM_RM_MASK);
14115 IEM_MC_ELSE()
14116 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(bRm & X86_MODRM_RM_MASK);
14117 IEM_MC_ENDIF();
14118 IEM_MC_ADVANCE_RIP();
14119
14120 IEM_MC_END();
14121 return VINF_SUCCESS;
14122}
14123
14124
/** Opcode 0xd9 0xf3. FPATAN: result stored in ST1, stack popped. */
FNIEMOP_DEF(iemOp_fpatan)
{
    IEMOP_MNEMONIC("fpatan st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fpatan_r80_by_r80);
}
14131
14132
/** Opcode 0xd9 0xf4. FXTRACT: replace ST0 and push a second result. */
FNIEMOP_DEF(iemOp_fxtract)
{
    IEMOP_MNEMONIC("fxtract st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fxtract_r80_r80);
}
14139
14140
/** Opcode 0xd9 0xf5. FPREM1: partial remainder of ST0 by ST1 into ST0. */
FNIEMOP_DEF(iemOp_fprem1)
{
    IEMOP_MNEMONIC("fprem1 st0, st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem1_r80_by_r80);
}
14147
14148
/** Opcode 0xd9 0xf6. FDECSTP: decrement the FPU stack top pointer. */
FNIEMOP_DEF(iemOp_fdecstp)
{
    IEMOP_MNEMONIC("fdecstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_DEC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0); /* clears C0/C2/C3, see note above */

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
14171
14172
/** Opcode 0xd9 0xf7. FINCSTP: increment the FPU stack top pointer. */
FNIEMOP_DEF(iemOp_fincstp)
{
    IEMOP_MNEMONIC("fincstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0); /* clears C0/C2/C3, see note above */

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
14195
14196
/** Opcode 0xd9 0xf8. FPREM: partial remainder of ST0 by ST1 into ST0. */
FNIEMOP_DEF(iemOp_fprem)
{
    IEMOP_MNEMONIC("fprem st0, st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem_r80_by_r80);
}
14203
14204
/** Opcode 0xd9 0xf9. FYL2XP1: result stored in ST1, stack popped. */
FNIEMOP_DEF(iemOp_fyl2xp1)
{
    IEMOP_MNEMONIC("fyl2xp1 st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2xp1_r80_by_r80);
}
14211
14212
/** Opcode 0xd9 0xfa. FSQRT: replace ST0 with its square root. */
FNIEMOP_DEF(iemOp_fsqrt)
{
    IEMOP_MNEMONIC("fsqrt st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsqrt_r80);
}
14219
14220
/** Opcode 0xd9 0xfb. FSINCOS: replace ST0 and push a second result. */
FNIEMOP_DEF(iemOp_fsincos)
{
    IEMOP_MNEMONIC("fsincos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fsincos_r80_r80);
}
14227
14228
/** Opcode 0xd9 0xfc. FRNDINT: round ST0 to an integer value in place. */
FNIEMOP_DEF(iemOp_frndint)
{
    IEMOP_MNEMONIC("frndint st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_frndint_r80);
}
14235
14236
/** Opcode 0xd9 0xfd. FSCALE: scale ST0 by ST1, result stored in ST0. */
FNIEMOP_DEF(iemOp_fscale)
{
    IEMOP_MNEMONIC("fscale st0, st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fscale_r80_by_r80);
}
14243
14244
/** Opcode 0xd9 0xfe. FSIN: replace ST0 with its sine. */
FNIEMOP_DEF(iemOp_fsin)
{
    IEMOP_MNEMONIC("fsin st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsin_r80);
}
14251
14252
/** Opcode 0xd9 0xff. FCOS: replace ST0 with its cosine. */
FNIEMOP_DEF(iemOp_fcos)
{
    IEMOP_MNEMONIC("fcos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fcos_r80);
}
14259
14260
/** Used by iemOp_EscF1.
 * Register-form dispatch table for the 0xd9 escape with bRm in the
 * 0xe0..0xff range; indexed by (bRm - 0xe0). */
static const PFNIEMOP g_apfnEscF1_E0toFF[32] =
{
    /* 0xe0 */ iemOp_fchs,
    /* 0xe1 */ iemOp_fabs,
    /* 0xe2 */ iemOp_Invalid,
    /* 0xe3 */ iemOp_Invalid,
    /* 0xe4 */ iemOp_ftst,
    /* 0xe5 */ iemOp_fxam,
    /* 0xe6 */ iemOp_Invalid,
    /* 0xe7 */ iemOp_Invalid,
    /* 0xe8 */ iemOp_fld1,
    /* 0xe9 */ iemOp_fldl2t,
    /* 0xea */ iemOp_fldl2e,
    /* 0xeb */ iemOp_fldpi,
    /* 0xec */ iemOp_fldlg2,
    /* 0xed */ iemOp_fldln2,
    /* 0xee */ iemOp_fldz,
    /* 0xef */ iemOp_Invalid,
    /* 0xf0 */ iemOp_f2xm1,
    /* 0xf1 */ iemOp_fylx2, /* sic: handler for FYL2X; identifier carries a typo */
    /* 0xf2 */ iemOp_fptan,
    /* 0xf3 */ iemOp_fpatan,
    /* 0xf4 */ iemOp_fxtract,
    /* 0xf5 */ iemOp_fprem1,
    /* 0xf6 */ iemOp_fdecstp,
    /* 0xf7 */ iemOp_fincstp,
    /* 0xf8 */ iemOp_fprem,
    /* 0xf9 */ iemOp_fyl2xp1,
    /* 0xfa */ iemOp_fsqrt,
    /* 0xfb */ iemOp_fsincos,
    /* 0xfc */ iemOp_frndint,
    /* 0xfd */ iemOp_fscale,
    /* 0xfe */ iemOp_fsin,
    /* 0xff */ iemOp_fcos
};
14297
14298
/** Opcode 0xd9. x87 escape: dispatch on the ModR/M byte.
 * Register forms handle FLD/FXCH/FNOP/FSTP and the 0xe0-0xff group;
 * memory forms handle m32r loads/stores and the FPU environment/control
 * word instructions. */
FNIEMOP_DEF(iemOp_EscF1)
{
    /* Remember the opcode byte offset for FOP reporting. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register forms, dispatched on the reg field. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm);
            case 2:
                if (bRm == 0xd0)
                    return FNIEMOP_CALL(iemOp_fnop);
                return IEMOP_RAISE_INVALID_OPCODE();
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved. Intel behavior seems to be FSTP ST(i) though. */
            case 4:
            case 5:
            case 6:
            case 7:
                /* reg >= 4 implies bRm >= 0xe0, so the table index is in range. */
                Assert((unsigned)bRm - 0xe0U < RT_ELEMENTS(g_apfnEscF1_E0toFF));
                return FNIEMOP_CALL(g_apfnEscF1_E0toFF[bRm - 0xe0]);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory forms, dispatched on the reg field. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m32r, bRm);
            case 1: return IEMOP_RAISE_INVALID_OPCODE();
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m32r, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fldenv, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fldcw, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fnstenv, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstcw, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
14340
14341
/** Opcode 0xda 11/0. FCMOVB: copy ST(i) to ST0 if CF is set. */
FNIEMOP_DEF_1(iemOp_fcmovb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U,      pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) /* below: CF=1 */
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14368
14369
/** Opcode 0xda 11/1. FCMOVE: copy ST(i) to ST0 if ZF is set. */
FNIEMOP_DEF_1(iemOp_fcmove_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmove st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U,      pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) /* equal: ZF=1 */
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14396
14397
/** Opcode 0xda 11/2. FCMOVBE: copy ST(i) to ST0 if CF or ZF is set. */
FNIEMOP_DEF_1(iemOp_fcmovbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U,      pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) /* below-or-equal */
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14424
14425
/** Opcode 0xda 11/3. FCMOVU: copy ST(i) to ST0 if PF is set (unordered). */
FNIEMOP_DEF_1(iemOp_fcmovu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovu st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U,      pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) /* unordered: PF=1 */
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14452
14453
14454/**
14455 * Common worker for FPU instructions working on ST0 and STn, only affecting
14456 * flags, and popping twice when done.
14457 *
14458 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14459 */
14460FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
14461{
14462 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14463
14464 IEM_MC_BEGIN(3, 1);
14465 IEM_MC_LOCAL(uint16_t, u16Fsw);
14466 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14467 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14468 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
14469
14470 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14471 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14472
14473 IEM_MC_PREPARE_FPU_USAGE();
14474 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, 1)
14475 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
14476 IEM_MC_UPDATE_FSW_THEN_POP_POP(u16Fsw);
14477 IEM_MC_ELSE()
14478 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP_POP();
14479 IEM_MC_ENDIF();
14480 IEM_MC_ADVANCE_RIP();
14481
14482 IEM_MC_END();
14483 return VINF_SUCCESS;
14484}
14485
14486
/** Opcode 0xda 0xe9. FUCOMPP: unordered compare ST0 with ST1, pop twice. */
FNIEMOP_DEF(iemOp_fucompp)
{
    IEMOP_MNEMONIC("fucompp st0,stN");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fucom_r80_by_r80);
}
14493
14494
14495/**
14496 * Common worker for FPU instructions working on ST0 and an m32i, and storing
14497 * the result in ST0.
14498 *
14499 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14500 */
14501FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32i, uint8_t, bRm, PFNIEMAIMPLFPUI32, pfnAImpl)
14502{
14503 IEM_MC_BEGIN(3, 3);
14504 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14505 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14506 IEM_MC_LOCAL(int32_t, i32Val2);
14507 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
14508 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14509 IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);
14510
14511 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14512 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14513
14514 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14515 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14516 IEM_MC_FETCH_MEM_I32(i32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);
14517
14518 IEM_MC_PREPARE_FPU_USAGE();
14519 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
14520 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi32Val2);
14521 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
14522 IEM_MC_ELSE()
14523 IEM_MC_FPU_STACK_UNDERFLOW(0);
14524 IEM_MC_ENDIF();
14525 IEM_MC_ADVANCE_RIP();
14526
14527 IEM_MC_END();
14528 return VINF_SUCCESS;
14529}
14530
14531
/** Opcode 0xda !11/0. FIADD: add m32i to ST0. */
FNIEMOP_DEF_1(iemOp_fiadd_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fiadd m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fiadd_r80_by_i32);
}
14538
14539
/** Opcode 0xda !11/1. FIMUL: multiply ST0 by m32i. */
FNIEMOP_DEF_1(iemOp_fimul_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fimul m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fimul_r80_by_i32);
}
14546
14547
/** Opcode 0xda !11/2. FICOM: compare ST0 with m32i, FSW flags only. */
FNIEMOP_DEF_1(iemOp_ficom_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ficom st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(int32_t,               i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14580
14581
/** Opcode 0xda !11/3. FICOMP: compare ST0 with m32i, then pop.
 * Same as FICOM above, but the FSW update / underflow handling pops ST0. */
FNIEMOP_DEF_1(iemOp_ficomp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ficomp st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(int32_t,               i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14614
14615
/** Opcode 0xda !11/4. FISUB: subtract m32i from ST0. */
FNIEMOP_DEF_1(iemOp_fisub_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisub m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisub_r80_by_i32);
}
14622
14623
/** Opcode 0xda !11/5. FISUBR: reverse subtract, ST0 = m32i - ST0. */
FNIEMOP_DEF_1(iemOp_fisubr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisubr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisubr_r80_by_i32);
}
14630
14631
/** Opcode 0xda !11/6. FIDIV: divide ST0 by m32i. */
FNIEMOP_DEF_1(iemOp_fidiv_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fidiv m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidiv_r80_by_i32);
}
14638
14639
/** Opcode 0xda !11/7. FIDIVR: reverse divide, ST0 = m32i / ST0. */
FNIEMOP_DEF_1(iemOp_fidivr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fidivr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidivr_r80_by_i32);
}
14646
14647
/** Opcode 0xda. x87 escape: FCMOVcc / FUCOMPP register forms and the
 * m32i integer arithmetic memory forms. */
FNIEMOP_DEF(iemOp_EscF2)
{
    /* Remember the opcode byte offset for FOP reporting. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register forms, dispatched on the reg field. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovb_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmove_stN,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovu_stN,  bRm);
            case 4: return IEMOP_RAISE_INVALID_OPCODE();
            case 5:
                if (bRm == 0xe9)
                    return FNIEMOP_CALL(iemOp_fucompp); /* only 0xda 0xe9 is valid here */
                return IEMOP_RAISE_INVALID_OPCODE();
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory forms, dispatched on the reg field. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m32i,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m32i,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m32i,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m32i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m32i,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m32i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m32i,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m32i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
14687
14688
/** Opcode 0xdb !11/0. FILD: load m32i, convert to r80 and push. */
FNIEMOP_DEF_1(iemOp_fild_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fild m32i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(int32_t,               i32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val,  i32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7) /* ST7 must be free for the push */
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i32_to_r80, pFpuRes, pi32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14720
14721
/** Opcode 0xdb !11/1. FISTTP: store ST0 to m32i with truncation, then pop. */
FNIEMOP_DEF_1(iemOp_fisttp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisttp m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int32_t *,               pi32Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST0: with IM masked, store the integer-indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14756
14757
/** Opcode 0xdb !11/2. FIST: store ST0 to m32i (rounded per FCW), no pop. */
FNIEMOP_DEF_1(iemOp_fist_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fist m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int32_t *,               pi32Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST0: with IM masked, store the integer-indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14792
14793
14794/** Opcode 0xdb !11/3. */
14795FNIEMOP_DEF_1(iemOp_fistp_m32i, uint8_t, bRm)
14796{
14797 IEMOP_MNEMONIC("fisttp m32i");
14798 IEM_MC_BEGIN(3, 2);
14799 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14800 IEM_MC_LOCAL(uint16_t, u16Fsw);
14801 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14802 IEM_MC_ARG(int32_t *, pi32Dst, 1);
14803 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
14804
14805 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
14806 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14807 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14808 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14809
14810 IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
14811 IEM_MC_PREPARE_FPU_USAGE();
14812 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
14813 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
14814 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
14815 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
14816 IEM_MC_ELSE()
14817 IEM_MC_IF_FCW_IM()
14818 IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
14819 IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
14820 IEM_MC_ENDIF();
14821 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
14822 IEM_MC_ENDIF();
14823 IEM_MC_ADVANCE_RIP();
14824
14825 IEM_MC_END();
14826 return VINF_SUCCESS;
14827}
14828
14829
/** Opcode 0xdb !11/5. FLD m80real: load an 80-bit real and push it. */
FNIEMOP_DEF_1(iemOp_fld_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fld m80r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(RTFLOAT80U,            r80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT80U,  pr80Val,    r80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R80(r80Val, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7) /* ST7 must be free for the push */
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r80, pFpuRes, pr80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14861
14862
/** Opcode 0xdb !11/7. FSTP m80real: store ST0 as 80-bit real, then pop. */
FNIEMOP_DEF_1(iemOp_fstp_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fstp m80r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT80U,             pr80Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr80Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r80, pu16Fsw, pr80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr80Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST0: with IM masked, store the negative QNaN (real indefinite). */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(pr80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr80Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14897
14898
/** Opcode 0xdb 11/0.
 * FCMOVNB ST(0),ST(i): copy ST(i) to ST(0) if CF is clear. */
FNIEMOP_DEF_1(iemOp_fcmovnb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovnb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(i) and ST(0) must be valid; otherwise flag underflow on ST(0). */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0xdb 11/1.
 * FCMOVNE ST(0),ST(i): copy ST(i) to ST(0) if ZF is clear. */
FNIEMOP_DEF_1(iemOp_fcmovne_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovne st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0xdb 11/2.
 * FCMOVNBE ST(0),ST(i): copy ST(i) to ST(0) if both CF and ZF are clear. */
FNIEMOP_DEF_1(iemOp_fcmovnbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovnbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0xdb 11/3.
 * FCMOVNU ST(0),ST(i) in Intel docs: copy ST(i) to ST(0) if PF is clear.
 * (Local naming uses "fcmovnnu".) */
FNIEMOP_DEF_1(iemOp_fcmovnnu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovnnu st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15009
15010
/** Opcode 0xdb 0xe0.
 * FNENI: 8087-only interrupt enable; ignored (NOP) on later FPUs. */
FNIEMOP_DEF(iemOp_fneni)
{
    IEMOP_MNEMONIC("fneni (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    /* Still honors CR0.EM/TS (#NM) even though it does nothing. */
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0xdb 0xe1.
 * FNDISI: 8087-only interrupt disable; ignored (NOP) on later FPUs. */
FNIEMOP_DEF(iemOp_fndisi)
{
    IEMOP_MNEMONIC("fndisi (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0xdb 0xe2.
 * FNCLEX: clear the FPU exception flags without checking for pending ones. */
FNIEMOP_DEF(iemOp_fnclex)
{
    IEMOP_MNEMONIC("fnclex");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_CLEAR_FSW_EX();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0xdb 0xe3.
 * FNINIT: reinitialize the FPU; deferred to the C implementation. */
FNIEMOP_DEF(iemOp_fninit)
{
    IEMOP_MNEMONIC("fninit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_finit, false /*fCheckXcpts*/);
}


/** Opcode 0xdb 0xe4.
 * FNSETPM: 80287-only; ignored (NOP) on later FPUs. */
FNIEMOP_DEF(iemOp_fnsetpm)
{
    IEMOP_MNEMONIC("fnsetpm (80287/ign)"); /* set protected mode on fpu. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0xdb 0xe5.
 * FRSTPM: 80287XL-only; raises \#UD here to match newer CPUs. */
FNIEMOP_DEF(iemOp_frstpm)
{
    IEMOP_MNEMONIC("frstpm (80287XL/ign)"); /* reset pm, back to real mode. */
#if 0 /* #UDs on newer CPUs */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
#else
    return IEMOP_RAISE_INVALID_OPCODE();
#endif
}
15090
15091
/** Opcode 0xdb 11/5.
 * FUCOMI ST(0),ST(i): unordered compare setting EFLAGS, no pop. */
FNIEMOP_DEF_1(iemOp_fucomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fucomi st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fucomi_r80_by_r80, false /*fPop*/);
}


/** Opcode 0xdb 11/6.
 * FCOMI ST(0),ST(i): ordered compare setting EFLAGS, no pop. */
FNIEMOP_DEF_1(iemOp_fcomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcomi st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, false /*fPop*/);
}
15106
15107
/** Opcode 0xdb.
 * Escape group 3 dispatcher: decodes ModRM and routes to the register-form
 * (mod == 3) or memory-form handlers keyed on the reg field. */
FNIEMOP_DEF(iemOp_EscF3)
{
    /* Remember the FPU opcode offset (opcode byte precedes the ModRM just fetched). */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovnb_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmovne_stN,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovnbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovnnu_stN, bRm);
            case 4:
                /* reg=4 encodes individual opcodes 0xe0..0xe7 rather than ST(i) forms. */
                switch (bRm)
                {
                    case 0xe0: return FNIEMOP_CALL(iemOp_fneni);
                    case 0xe1: return FNIEMOP_CALL(iemOp_fndisi);
                    case 0xe2: return FNIEMOP_CALL(iemOp_fnclex);
                    case 0xe3: return FNIEMOP_CALL(iemOp_fninit);
                    case 0xe4: return FNIEMOP_CALL(iemOp_fnsetpm);
                    case 0xe5: return FNIEMOP_CALL(iemOp_frstpm);
                    case 0xe6: return IEMOP_RAISE_INVALID_OPCODE();
                    case 0xe7: return IEMOP_RAISE_INVALID_OPCODE();
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* unreachable: every sub-case above returns */
            case 5: return FNIEMOP_CALL_1(iemOp_fucomi_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fcomi_stN, bRm);
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fild_m32i,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m32i,bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fist_m32i,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fistp_m32i, bRm);
            case 4: return IEMOP_RAISE_INVALID_OPCODE();
            case 5: return FNIEMOP_CALL_1(iemOp_fld_m80r,   bRm);
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return FNIEMOP_CALL_1(iemOp_fstp_m80r,  bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
15157
15158
15159/**
15160 * Common worker for FPU instructions working on STn and ST0, and storing the
15161 * result in STn unless IE, DE or ZE was raised.
15162 *
15163 * @param pfnAImpl Pointer to the instruction implementation (assembly).
15164 */
15165FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
15166{
15167 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15168
15169 IEM_MC_BEGIN(3, 1);
15170 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
15171 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
15172 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
15173 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
15174
15175 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15176 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15177
15178 IEM_MC_PREPARE_FPU_USAGE();
15179 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
15180 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
15181 IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
15182 IEM_MC_ELSE()
15183 IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
15184 IEM_MC_ENDIF();
15185 IEM_MC_ADVANCE_RIP();
15186
15187 IEM_MC_END();
15188 return VINF_SUCCESS;
15189}
15190
15191
/* 0xdc register forms: arithmetic with ST(0), result stored in ST(i).
 * Note the sub/subr and div/divr assignments are swapped relative to 0xd8. */

/** Opcode 0xdc 11/0. FADD ST(i),ST(0). */
FNIEMOP_DEF_1(iemOp_fadd_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fadd stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fadd_r80_by_r80);
}


/** Opcode 0xdc 11/1. FMUL ST(i),ST(0). */
FNIEMOP_DEF_1(iemOp_fmul_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fmul stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fmul_r80_by_r80);
}


/** Opcode 0xdc 11/4. FSUBR ST(i),ST(0). */
FNIEMOP_DEF_1(iemOp_fsubr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsubr_r80_by_r80);
}


/** Opcode 0xdc 11/5. FSUB ST(i),ST(0). */
FNIEMOP_DEF_1(iemOp_fsub_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsub stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsub_r80_by_r80);
}


/** Opcode 0xdc 11/6. FDIVR ST(i),ST(0). */
FNIEMOP_DEF_1(iemOp_fdivr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdivr_r80_by_r80);
}


/** Opcode 0xdc 11/7. FDIV ST(i),ST(0). */
FNIEMOP_DEF_1(iemOp_fdiv_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdiv stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdiv_r80_by_r80);
}
15238
15239
15240/**
15241 * Common worker for FPU instructions working on ST0 and a 64-bit floating point
15242 * memory operand, and storing the result in ST0.
15243 *
15244 * @param pfnAImpl Pointer to the instruction implementation (assembly).
15245 */
15246FNIEMOP_DEF_2(iemOpHlpFpu_ST0_m64r, uint8_t, bRm, PFNIEMAIMPLFPUR64, pfnImpl)
15247{
15248 IEM_MC_BEGIN(3, 3);
15249 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15250 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
15251 IEM_MC_LOCAL(RTFLOAT64U, r64Factor2);
15252 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
15253 IEM_MC_ARG(PCRTFLOAT80U, pr80Factor1, 1);
15254 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Factor2, r64Factor2, 2);
15255
15256 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15257 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15258 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15259 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15260
15261 IEM_MC_FETCH_MEM_R64(r64Factor2, pIemCpu->iEffSeg, GCPtrEffSrc);
15262 IEM_MC_PREPARE_FPU_USAGE();
15263 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Factor1, 0)
15264 IEM_MC_CALL_FPU_AIMPL_3(pfnImpl, pFpuRes, pr80Factor1, pr64Factor2);
15265 IEM_MC_STORE_FPU_RESULT_MEM_OP(FpuRes, 0, pIemCpu->iEffSeg, GCPtrEffSrc);
15266 IEM_MC_ELSE()
15267 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(0, pIemCpu->iEffSeg, GCPtrEffSrc);
15268 IEM_MC_ENDIF();
15269 IEM_MC_ADVANCE_RIP();
15270
15271 IEM_MC_END();
15272 return VINF_SUCCESS;
15273}
15274
15275
/** Opcode 0xdc !11/0. FADD ST(0),m64fp. */
FNIEMOP_DEF_1(iemOp_fadd_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fadd m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fadd_r80_by_r64);
}


/** Opcode 0xdc !11/1. FMUL ST(0),m64fp. */
FNIEMOP_DEF_1(iemOp_fmul_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fmul m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fmul_r80_by_r64);
}
15290
15291
/** Opcode 0xdc !11/2.
 * FCOM ST(0),m64fp: compare ST(0) with a 64-bit real from memory; only the
 * FSW condition codes are updated, nothing is stored or popped. */
FNIEMOP_DEF_1(iemOp_fcom_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcom st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U,            r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U,  pr64Val2,   r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15324
15325
/** Opcode 0xdc !11/3.
 * FCOMP ST(0),m64fp: same as FCOM m64fp but pops the stack afterwards. */
FNIEMOP_DEF_1(iemOp_fcomp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcomp st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U,            r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U,  pr64Val2,   r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        /* Pop variant: FSW update also pops ST(0). */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15358
15359
/** Opcode 0xdc !11/4. FSUB ST(0),m64fp. */
FNIEMOP_DEF_1(iemOp_fsub_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsub m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsub_r80_by_r64);
}


/** Opcode 0xdc !11/5. FSUBR ST(0),m64fp. */
FNIEMOP_DEF_1(iemOp_fsubr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsubr_r80_by_r64);
}


/** Opcode 0xdc !11/6. FDIV ST(0),m64fp. */
FNIEMOP_DEF_1(iemOp_fdiv_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdiv m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdiv_r80_by_r64);
}


/** Opcode 0xdc !11/7. FDIVR ST(0),m64fp. */
FNIEMOP_DEF_1(iemOp_fdivr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdivr_r80_by_r64);
}
15390
15391
/** Opcode 0xdc.
 * Escape group 4 dispatcher: register forms (mod == 3) operate ST(i),ST(0);
 * memory forms take a 64-bit real operand. */
FNIEMOP_DEF(iemOp_EscF4)
{
    /* Remember the FPU opcode offset (opcode byte precedes the ModRM just fetched). */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN_st0,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN_st0,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN,      bRm); /* Marked reserved, intel behavior is that of FCOM ST(i). */
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN,     bRm); /* Marked reserved, intel behavior is that of FCOMP ST(i). */
            case 4: return FNIEMOP_CALL_1(iemOp_fsubr_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsub_stN_st0,  bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivr_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdiv_stN_st0,  bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m64r,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m64r,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m64r,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m64r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m64r,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m64r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m64r,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m64r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
15428
15429
/** Opcode 0xdd !11/0.
 * FLD m64fp: convert a 64-bit real from memory to 80-bit and push it.
 * @sa iemOp_fld_m32r */
FNIEMOP_DEF_1(iemOp_fld_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fld m64r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U,            r64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U,  pr64Val, r64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Val, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    /* Push only if the register that will become ST(0) is free. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r64_to_r80, pFpuRes, pr64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15461
15462
/** Opcode 0xdd !11/1.
 * FISTTP m64int: store ST(0) to memory as int64 with truncation, then pop.
 * On underflow with IM masked, the integer indefinite (INT64_MIN) is stored. */
FNIEMOP_DEF_1(iemOp_fisttp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisttp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,   u16Fsw, 0);
    IEM_MC_ARG(int64_t *,               pi64Dst,   1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination first so memory faults precede FPU work. */
    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15497
15498
/** Opcode 0xdd !11/2.
 * FST m64fp: store ST(0) to memory as a 64-bit real; no pop.
 * On underflow with IM masked, a negative QNaN indefinite is stored. */
FNIEMOP_DEF_1(iemOp_fst_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fst m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,   u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U,             pr64Dst,   1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        /* Non-pop variant: only FSW is updated. */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15533
15534
15535
15536
/** Opcode 0xdd !11/3.
 * FSTP m64fp: store ST(0) to memory as a 64-bit real, then pop.
 * Identical to iemOp_fst_m64r except for the pop on completion/underflow. */
FNIEMOP_DEF_1(iemOp_fstp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fstp m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,   u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U,             pr64Dst,   1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15571
15572
/** Opcode 0xdd !11/4.
 * FRSTOR m94/108byte: restore the whole FPU state from memory; the heavy
 * lifting is deferred to the C implementation (layout depends on opsize). */
FNIEMOP_DEF_1(iemOp_frstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC("frstor m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE,           enmEffOpSize, /*=*/ pIemCpu->enmEffOpSize,  0);
    IEM_MC_ARG(uint8_t,                 iEffSeg,                                    1);
    IEM_MC_ARG(RTGCPTR,                 GCPtrEffSrc,                                2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_frstor, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
15590
15591
/** Opcode 0xdd !11/6.
 * FNSAVE m94/108byte: save the whole FPU state to memory without checking
 * for pending exceptions; deferred to the C implementation. */
FNIEMOP_DEF_1(iemOp_fnsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fnsave m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE,           enmEffOpSize, /*=*/ pIemCpu->enmEffOpSize,  0);
    IEM_MC_ARG(uint8_t,                 iEffSeg,                                    1);
    IEM_MC_ARG(RTGCPTR,                 GCPtrEffDst,                                2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fnsave, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;

}
15610
/** Opcode 0xdd !11/7.
 * FNSTSW m16: store the FPU status word to memory, no exception check. */
FNIEMOP_DEF_1(iemOp_fnstsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fnstsw m16");

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Tmp);
    IEM_MC_ADVANCE_RIP();

/** @todo Debug / drop a hint to the verifier that things may differ
 * from REM. Seen 0x4020 (iem) vs 0x4000 (rem) at 0008:801c6b88 booting
 * NT4SP1. (X86_FSW_PE) */
    IEM_MC_END();
    return VINF_SUCCESS;
}
15635
15636
/** Opcode 0xdd 11/0.
 * FFREE ST(i): mark the given register as empty in the tag word. */
FNIEMOP_DEF_1(iemOp_ffree_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ffree stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C1, C2 and C3 are documented as undefined, we leave the
             unmodified. */

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
15658
15659
/** Opcode 0xdd 11/2.
 * FST ST(i): copy ST(0) into ST(i); underflow is flagged on ST(i) when
 * ST(0) is empty. */
FNIEMOP_DEF_1(iemOp_fst_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fst st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        /* Wrap the ST(0) value in a result with a zero FSW delta and store it. */
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
    IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
15684
15685
15686/** Opcode 0xdd 11/3. */
15687FNIEMOP_DEF_1(iemOp_fucom_stN_st0, uint8_t, bRm)
15688{
15689 IEMOP_MNEMONIC("fcom st0,stN");
15690 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fucom_r80_by_r80);
15691}
15692
15693
15694/** Opcode 0xdd 11/4. */
15695FNIEMOP_DEF_1(iemOp_fucomp_stN, uint8_t, bRm)
15696{
15697 IEMOP_MNEMONIC("fcomp st0,stN");
15698 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fucom_r80_by_r80);
15699}
15700
15701
/** Opcode 0xdd.
 * Escape group 5 dispatcher: register forms cover FFREE/FST/FSTP/FUCOM(P);
 * memory forms cover 64-bit real loads/stores and FPU state save/restore. */
FNIEMOP_DEF(iemOp_EscF5)
{
    /* Remember the FPU opcode offset (opcode byte precedes the ModRM just fetched). */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffree_stN,   bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN,    bRm); /* Reserved, intel behavior is that of XCHG ST(i). */
            case 2: return FNIEMOP_CALL_1(iemOp_fst_stN,     bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN,    bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fucom_stN_st0,bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fucomp_stN,  bRm);
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m64r,    bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m64i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m64r,    bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m64r,   bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_frstor,      bRm);
            case 5: return IEMOP_RAISE_INVALID_OPCODE();
            case 6: return FNIEMOP_CALL_1(iemOp_fnsave,      bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstsw,      bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
15738
15739
/** Opcode 0xde 11/0. FADDP ST(i),ST(0): add and pop. */
FNIEMOP_DEF_1(iemOp_faddp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("faddp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fadd_r80_by_r80);
}


/** Opcode 0xde 11/1. FMULP ST(i),ST(0): multiply and pop. */
FNIEMOP_DEF_1(iemOp_fmulp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fmulp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fmul_r80_by_r80);
}
15754
15755
15756/** Opcode 0xde 0xd9. */
15757FNIEMOP_DEF(iemOp_fcompp)
15758{
15759 IEMOP_MNEMONIC("fucompp st0,stN");
15760 return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fcom_r80_by_r80);
15761}
15762
15763
/** Opcode 0xde 11/4. FSUBRP ST(i),ST(0): reverse-subtract and pop. */
FNIEMOP_DEF_1(iemOp_fsubrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsubr_r80_by_r80);
}


/** Opcode 0xde 11/5. FSUBP ST(i),ST(0): subtract and pop. */
FNIEMOP_DEF_1(iemOp_fsubp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsub_r80_by_r80);
}


/** Opcode 0xde 11/6. FDIVRP ST(i),ST(0): reverse-divide and pop. */
FNIEMOP_DEF_1(iemOp_fdivrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdivr_r80_by_r80);
}


/** Opcode 0xde 11/7. FDIVP ST(i),ST(0): divide and pop. */
FNIEMOP_DEF_1(iemOp_fdivp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdiv_r80_by_r80);
}
15794
15795
15796/**
15797 * Common worker for FPU instructions working on ST0 and an m16i, and storing
15798 * the result in ST0.
15799 *
15800 * @param pfnAImpl Pointer to the instruction implementation (assembly).
15801 */
15802FNIEMOP_DEF_2(iemOpHlpFpu_st0_m16i, uint8_t, bRm, PFNIEMAIMPLFPUI16, pfnAImpl)
15803{
15804 IEM_MC_BEGIN(3, 3);
15805 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15806 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
15807 IEM_MC_LOCAL(int16_t, i16Val2);
15808 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
15809 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
15810 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);
15811
15812 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15813 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15814
15815 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15816 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15817 IEM_MC_FETCH_MEM_I16(i16Val2, pIemCpu->iEffSeg, GCPtrEffSrc);
15818
15819 IEM_MC_PREPARE_FPU_USAGE();
15820 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
15821 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi16Val2);
15822 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
15823 IEM_MC_ELSE()
15824 IEM_MC_FPU_STACK_UNDERFLOW(0);
15825 IEM_MC_ENDIF();
15826 IEM_MC_ADVANCE_RIP();
15827
15828 IEM_MC_END();
15829 return VINF_SUCCESS;
15830}
15831
15832
/** Opcode 0xde !11/0. */
FNIEMOP_DEF_1(iemOp_fiadd_m16i, uint8_t, bRm)
{
    /* FIADD m16int: ST0 += (int16). */
    IEMOP_MNEMONIC("fiadd m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fiadd_r80_by_i16);
}
15839
15840
/** Opcode 0xde !11/1. */
FNIEMOP_DEF_1(iemOp_fimul_m16i, uint8_t, bRm)
{
    /* FIMUL m16int: ST0 *= (int16). */
    IEMOP_MNEMONIC("fimul m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fimul_r80_by_i16);
}
15847
15848
/** Opcode 0xde !11/2.
 * FICOM m16int: compare ST0 against a signed 16-bit integer; only FSW is
 * updated, ST0 is not modified and nothing is popped. */
FNIEMOP_DEF_1(iemOp_ficom_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ficom st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        /* Non-popping FSW update - this is FICOM, not FICOMP. */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15881
15882
/** Opcode 0xde !11/3.
 * FICOMP m16int: like FICOM m16int, but pops the register stack afterwards. */
FNIEMOP_DEF_1(iemOp_ficomp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ficomp st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        /* _THEN_POP variants: the pop distinguishes this from FICOM. */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15915
15916
/** Opcode 0xde !11/4. */
FNIEMOP_DEF_1(iemOp_fisub_m16i, uint8_t, bRm)
{
    /* FISUB m16int: ST0 -= (int16). */
    IEMOP_MNEMONIC("fisub m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisub_r80_by_i16);
}
15923
15924
/** Opcode 0xde !11/5. */
FNIEMOP_DEF_1(iemOp_fisubr_m16i, uint8_t, bRm)
{
    /* FISUBR m16int: reverse subtract, see iemAImpl_fisubr_r80_by_i16. */
    IEMOP_MNEMONIC("fisubr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisubr_r80_by_i16);
}
15931
15932
15933/** Opcode 0xde !11/6. */
15934FNIEMOP_DEF_1(iemOp_fidiv_m16i, uint8_t, bRm)
15935{
15936 IEMOP_MNEMONIC("fiadd m16i");
15937 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidiv_r80_by_i16);
15938}
15939
15940
15941/** Opcode 0xde !11/7. */
15942FNIEMOP_DEF_1(iemOp_fidivr_m16i, uint8_t, bRm)
15943{
15944 IEMOP_MNEMONIC("fiadd m16i");
15945 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidivr_r80_by_i16);
15946}
15947
15948
/** Opcode 0xde.
 * FPU escape byte 6: register forms (mod == 3) are the popping arithmetic
 * ops on ST(i),ST0 plus FCOMPP; memory forms operate on a 16-bit integer. */
FNIEMOP_DEF(iemOp_EscF6)
{
    /* Remember where the FPU opcode starts for FOP reporting (opcode byte
       is already consumed, hence the -1). */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_faddp_stN_st0,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmulp_stN_st0,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcomp_stN,      bRm);
            case 3: if (bRm == 0xd9)
                        return FNIEMOP_CALL(iemOp_fcompp);
                    return IEMOP_RAISE_INVALID_OPCODE();
            case 4: return FNIEMOP_CALL_1(iemOp_fsubrp_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubp_stN_st0,  bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivrp_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivp_stN_st0,  bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m16i,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m16i,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m16i,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m16i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m16i,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m16i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m16i,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m16i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
15987
15988
/** Opcode 0xdf 11/0.
 * Undocumented instruction, assumed to work like ffree + fincstp:
 * tag ST(i) as empty, then increment the FPU stack top. */
FNIEMOP_DEF_1(iemOp_ffreep_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ffreep stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
16010
16011
/** Opcode 0xdf 0xe0.
 * FNSTSW AX: store the FPU status word in AX without checking for pending
 * FPU exceptions (no IEM_MC_MAYBE_RAISE_FPU_XCPT - "no-wait" form). */
FNIEMOP_DEF(iemOp_fnstsw_ax)
{
    IEMOP_MNEMONIC("fnstsw ax");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
16028
16029
16030/** Opcode 0xdf 11/5. */
16031FNIEMOP_DEF_1(iemOp_fucomip_st0_stN, uint8_t, bRm)
16032{
16033 IEMOP_MNEMONIC("fcomip st0,stN");
16034 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
16035}
16036
16037
/** Opcode 0xdf 11/6. */
FNIEMOP_DEF_1(iemOp_fcomip_st0_stN, uint8_t, bRm)
{
    /* FCOMIP: ordered compare of ST0 against ST(i) into EFLAGS, then pop. */
    IEMOP_MNEMONIC("fcomip st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
}
16044
16045
/** Opcode 0xdf !11/0.
 * FILD m16int: convert a signed 16-bit integer to R80 and push it. */
FNIEMOP_DEF_1(iemOp_fild_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fild m16i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int16_t, i16Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val, i16Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Register 7 (relative to TOP) is the slot the push will land in. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i16_to_r80, pFpuRes, pi16Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
16077
16078
/** Opcode 0xdf !11/1.
 * FISTTP m16int (SSE3): store ST0 as int16 with truncation, then pop. */
FNIEMOP_DEF_1(iemOp_fisttp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisttp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination writable up front so a #PF is raised before any
       FPU state is changed. */
    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: with #IS masked, store the integer indefinite. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
16113
16114
16115/** Opcode 0xdf !11/2. */
16116FNIEMOP_DEF_1(iemOp_fist_m16i, uint8_t, bRm)
16117{
16118 IEMOP_MNEMONIC("fistp m16i");
16119 IEM_MC_BEGIN(3, 2);
16120 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
16121 IEM_MC_LOCAL(uint16_t, u16Fsw);
16122 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
16123 IEM_MC_ARG(int16_t *, pi16Dst, 1);
16124 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
16125
16126 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
16127 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16128 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16129 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16130
16131 IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
16132 IEM_MC_PREPARE_FPU_USAGE();
16133 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
16134 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
16135 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
16136 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
16137 IEM_MC_ELSE()
16138 IEM_MC_IF_FCW_IM()
16139 IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
16140 IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
16141 IEM_MC_ENDIF();
16142 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
16143 IEM_MC_ENDIF();
16144 IEM_MC_ADVANCE_RIP();
16145
16146 IEM_MC_END();
16147 return VINF_SUCCESS;
16148}
16149
16150
/** Opcode 0xdf !11/3.
 * FISTP m16int: store ST0 as int16 (rounded per FCW.RC), then pop. */
FNIEMOP_DEF_1(iemOp_fistp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fistp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination writable up front so a #PF is raised before any
       FPU state is changed. */
    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: with #IS masked, store the integer indefinite. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
16185
16186
/** Opcode 0xdf !11/4.
 * FBLD - not yet implemented (stub raises the standard not-implemented
 * status).  NOTE(review): the operand is an 80-bit packed BCD per the SDM,
 * so "m80d" in the name presumably means the 10-byte decimal form - confirm. */
FNIEMOP_STUB_1(iemOp_fbld_m80d, uint8_t, bRm);
16189
16190
/** Opcode 0xdf !11/5.
 * FILD m64int: convert a signed 64-bit integer to R80 and push it. */
FNIEMOP_DEF_1(iemOp_fild_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fild m64i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int64_t, i64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int64_t const *, pi64Val, i64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I64(i64Val, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Register 7 (relative to TOP) is the slot the push will land in. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i64_to_r80, pFpuRes, pi64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
16222
16223
/** Opcode 0xdf !11/6.
 * FBSTP - not yet implemented (stub raises the standard not-implemented
 * status).  NOTE(review): operand is 80-bit packed BCD per the SDM - confirm
 * the "m80d" naming is intentional. */
FNIEMOP_STUB_1(iemOp_fbstp_m80d, uint8_t, bRm);
16226
16227
/** Opcode 0xdf !11/7.
 * FISTP m64int: store ST0 as int64 (rounded per FCW.RC), then pop. */
FNIEMOP_DEF_1(iemOp_fistp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fistp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination writable up front so a #PF is raised before any
       FPU state is changed. */
    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: with #IS masked, store the integer indefinite. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
16262
16263
/** Opcode 0xdf.
 * FPU escape byte 7: register forms include FFREEP, FNSTSW AX and the
 * EFLAGS compare-and-pop instructions; memory forms are integer/BCD
 * load/store. */
FNIEMOP_DEF(iemOp_EscF7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffreep_stN, bRm); /* ffree + pop afterwards, since forever according to AMD. */
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, behaves like FXCH ST(i) on intel. */
            case 2: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
            case 4: if (bRm == 0xe0)
                        return FNIEMOP_CALL(iemOp_fnstsw_ax);
                    return IEMOP_RAISE_INVALID_OPCODE();
            case 5: return FNIEMOP_CALL_1(iemOp_fucomip_st0_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fcomip_st0_stN, bRm);
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fild_m16i,   bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m16i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fist_m16i,   bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fistp_m16i,  bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fbld_m80d,   bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fild_m64i,   bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fbstp_m80d,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fistp_m64i,  bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
16301
16302
/** Opcode 0xe0.
 * LOOPNE Jb: decrement xCX (width per effective address size) and take the
 * short relative branch while the counter is non-zero and ZF is clear. */
FNIEMOP_DEF(iemOp_loopne_Jb)
{
    IEMOP_MNEMONIC("loopne Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* The address size prefix selects CX/ECX/RCX as the counter. */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16349
16350
/** Opcode 0xe1.
 * LOOPE Jb: decrement xCX (width per effective address size) and take the
 * short relative branch while the counter is non-zero and ZF is set. */
FNIEMOP_DEF(iemOp_loope_Jb)
{
    IEMOP_MNEMONIC("loope Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* The address size prefix selects CX/ECX/RCX as the counter. */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16397
16398
/** Opcode 0xe2.
 * LOOP Jb: decrement xCX (width per effective address size) and take the
 * short relative branch while the counter is non-zero.  A branch-to-self
 * (tight "loop $" delay idiom) is special-cased: the counter is simply
 * cleared instead of iterating, avoiding a long emulation loop. */
FNIEMOP_DEF(iemOp_loop_Jb)
{
    IEMOP_MNEMONIC("loop Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /** @todo Check out the #GP case if EIP < CS.Base or EIP > CS.Limit when
     * using the 32-bit operand size override.  How can that be restarted?  See
     * weird pseudo code in intel manual. */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            /* -offOpcode == i8Imm means the branch targets this very
               instruction, i.e. the "loop $" delay idiom. */
            if (-(int8_t)pIemCpu->offOpcode != i8Imm)
            {
                IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
                IEM_MC_IF_CX_IS_NZ() {
                    IEM_MC_REL_JMP_S8(i8Imm);
                } IEM_MC_ELSE() {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ENDIF();
            }
            else
            {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP();
            }
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            if (-(int8_t)pIemCpu->offOpcode != i8Imm)
            {
                IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
                IEM_MC_IF_ECX_IS_NZ() {
                    IEM_MC_REL_JMP_S8(i8Imm);
                } IEM_MC_ELSE() {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ENDIF();
            }
            else
            {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP();
            }
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            if (-(int8_t)pIemCpu->offOpcode != i8Imm)
            {
                IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
                IEM_MC_IF_RCX_IS_NZ() {
                    IEM_MC_REL_JMP_S8(i8Imm);
                } IEM_MC_ELSE() {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ENDIF();
            }
            else
            {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP();
            }
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16472
16473
/** Opcode 0xe3.
 * JCXZ/JECXZ/JRCXZ Jb: take the short relative branch when the counter
 * register (selected by the effective address size) is zero. */
FNIEMOP_DEF(iemOp_jecxz_Jb)
{
    IEMOP_MNEMONIC("jecxz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_CX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_ECX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_RCX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16517
16518
16519/** Opcode 0xe4 */
16520FNIEMOP_DEF(iemOp_in_AL_Ib)
16521{
16522 IEMOP_MNEMONIC("in eAX,Ib");
16523 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
16524 IEMOP_HLP_NO_LOCK_PREFIX();
16525 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, 1);
16526}
16527
16528
/** Opcode 0xe5 */
FNIEMOP_DEF(iemOp_in_eAX_Ib)
{
    /* IN eAX,Ib: access width (2 or 4 bytes) follows the operand size. */
    IEMOP_MNEMONIC("in eAX,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, pIemCpu->enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
16537
16538
/** Opcode 0xe6 */
FNIEMOP_DEF(iemOp_out_Ib_AL)
{
    /* OUT Ib,AL: byte-sized port write (cbReg = 1). */
    IEMOP_MNEMONIC("out Ib,AL");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, 1);
}
16547
16548
/** Opcode 0xe7 */
FNIEMOP_DEF(iemOp_out_Ib_eAX)
{
    /* OUT Ib,eAX: access width (2 or 4 bytes) follows the operand size. */
    IEMOP_MNEMONIC("out Ib,eAX");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, pIemCpu->enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
16557
16558
/** Opcode 0xe8.
 * CALL rel16/rel32: near relative call; in 64-bit mode the rel32 immediate
 * is sign-extended to 64 bits.  Deferred to the C implementations. */
FNIEMOP_DEF(iemOp_call_Jv)
{
    IEMOP_MNEMONIC("call Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_16, (int16_t)u16Imm);
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_32, (int32_t)u32Imm);
        }

        case IEMMODE_64BIT:
        {
            /* 64-bit: the 32-bit displacement is sign-extended to 64 bits. */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_64, u64Imm);
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16587
16588
/** Opcode 0xe9.
 * JMP rel16/rel32: near relative jump.  64-bit mode shares the rel32 path
 * since the displacement stays 32 bits there. */
FNIEMOP_DEF(iemOp_jmp_Jv)
{
    IEMOP_MNEMONIC("jmp Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
            IEM_MC_BEGIN(0, 0);
            IEM_MC_REL_JMP_S16(i16Imm);
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        case IEMMODE_32BIT:
        {
            int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
            IEM_MC_BEGIN(0, 0);
            IEM_MC_REL_JMP_S32(i32Imm);
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16618
16619
/** Opcode 0xea.
 * JMP ptr16:16/ptr16:32: direct far jump; invalid in 64-bit mode. */
FNIEMOP_DEF(iemOp_jmp_Ap)
{
    IEMOP_MNEMONIC("jmp Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    uint32_t offSeg;
    if (pIemCpu->enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&offSeg);
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
    uint16_t uSel;  IEM_OPCODE_GET_NEXT_U16(&uSel);
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_FarJmp, uSel, offSeg, pIemCpu->enmEffOpSize);
}
16636
16637
/** Opcode 0xeb.
 * JMP rel8: short relative jump. */
FNIEMOP_DEF(iemOp_jmp_Jb)
{
    IEMOP_MNEMONIC("jmp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_REL_JMP_S8(i8Imm);
    IEM_MC_END();
    return VINF_SUCCESS;
}
16651
16652
/** Opcode 0xec */
FNIEMOP_DEF(iemOp_in_AL_DX)
{
    /* IN AL,DX: byte-sized port read from the port number in DX. */
    IEMOP_MNEMONIC("in AL,DX");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, 1);
}
16660
16661
/** Opcode 0xed */
FNIEMOP_DEF(iemOp_eAX_DX)
{
    /* IN eAX,DX: access width (2 or 4 bytes) follows the operand size.
       NOTE(review): the function name lacks the "in_" infix used by its
       siblings (iemOp_in_AL_DX); renaming would require touching the
       one-byte opcode table, so only flagging it here. */
    IEMOP_MNEMONIC("in eAX,DX");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, pIemCpu->enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
16669
16670
/** Opcode 0xee */
FNIEMOP_DEF(iemOp_out_DX_AL)
{
    /* OUT DX,AL: byte-sized port write to the port number in DX. */
    IEMOP_MNEMONIC("out DX,AL");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, 1);
}
16678
16679
/** Opcode 0xef */
FNIEMOP_DEF(iemOp_out_DX_eAX)
{
    /* OUT DX,eAX: access width (2 or 4 bytes) follows the operand size. */
    IEMOP_MNEMONIC("out DX,eAX");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, pIemCpu->enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
16687
16688
/** Opcode 0xf0.
 * LOCK prefix: record the prefix flag and continue decoding with the next
 * opcode byte. */
FNIEMOP_DEF(iemOp_lock)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("lock");
    pIemCpu->fPrefixes |= IEM_OP_PRF_LOCK;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
16698
16699
/** Opcode 0xf1.
 * INT1/ICEBP: raises #DB via the common software interrupt implementation
 * (fIsBpInstr=false so it is not treated as INT3). */
FNIEMOP_DEF(iemOp_int_1)
{
    IEMOP_MNEMONIC("int1"); /* icebp */
    IEMOP_HLP_MIN_386(); /** @todo does not generate #UD on 286, or so they say... */
    /** @todo testcase! */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_DB, false /*fIsBpInstr*/);
}
16708
16709
/** Opcode 0xf2.
 * REPNE/REPNZ prefix: record the prefix flag (replacing any earlier REPE)
 * and continue decoding with the next opcode byte. */
FNIEMOP_DEF(iemOp_repne)
{
    /* This overrides any previous REPE prefix. */
    pIemCpu->fPrefixes &= ~IEM_OP_PRF_REPZ;
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repne");
    pIemCpu->fPrefixes |= IEM_OP_PRF_REPNZ;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
16721
16722
/** Opcode 0xf3.
 * REP/REPE/REPZ prefix: record the prefix flag (replacing any earlier REPNE)
 * and continue decoding with the next opcode byte. */
FNIEMOP_DEF(iemOp_repe)
{
    /* This overrides any previous REPNE prefix. */
    pIemCpu->fPrefixes &= ~IEM_OP_PRF_REPNZ;
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repe");
    pIemCpu->fPrefixes |= IEM_OP_PRF_REPZ;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
16734
16735
/** Opcode 0xf4.
 * HLT: deferred to the C implementation (privilege check lives there). */
FNIEMOP_DEF(iemOp_hlt)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_hlt);
}
16742
16743
/** Opcode 0xf5.
 * CMC: complement the carry flag; no other flags affected. */
FNIEMOP_DEF(iemOp_cmc)
{
    IEMOP_MNEMONIC("cmc");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_FLIP_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
16755
16756
16757/**
16758 * Common implementation of 'inc/dec/not/neg Eb'.
16759 *
16760 * @param bRm The RM byte.
16761 * @param pImpl The instruction implementation.
16762 */
16763FNIEMOP_DEF_2(iemOpCommonUnaryEb, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
16764{
16765 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
16766 {
16767 /* register access */
16768 IEM_MC_BEGIN(2, 0);
16769 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
16770 IEM_MC_ARG(uint32_t *, pEFlags, 1);
16771 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
16772 IEM_MC_REF_EFLAGS(pEFlags);
16773 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
16774 IEM_MC_ADVANCE_RIP();
16775 IEM_MC_END();
16776 }
16777 else
16778 {
16779 /* memory access. */
16780 IEM_MC_BEGIN(2, 2);
16781 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
16782 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
16783 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
16784
16785 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
16786 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
16787 IEM_MC_FETCH_EFLAGS(EFlags);
16788 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
16789 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
16790 else
16791 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU8, pu8Dst, pEFlags);
16792
16793 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
16794 IEM_MC_COMMIT_EFLAGS(EFlags);
16795 IEM_MC_ADVANCE_RIP();
16796 IEM_MC_END();
16797 }
16798 return VINF_SUCCESS;
16799}
16800
16801
16802/**
16803 * Common implementation of 'inc/dec/not/neg Ev'.
16804 *
16805 * @param bRm The RM byte.
16806 * @param pImpl The instruction implementation.
16807 */
16808FNIEMOP_DEF_2(iemOpCommonUnaryEv, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
16809{
16810 /* Registers are handled by a common worker. */
16811 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
16812 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, pImpl, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
16813
16814 /* Memory we do here. */
16815 switch (pIemCpu->enmEffOpSize)
16816 {
16817 case IEMMODE_16BIT:
16818 IEM_MC_BEGIN(2, 2);
16819 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
16820 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
16821 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
16822
16823 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
16824 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
16825 IEM_MC_FETCH_EFLAGS(EFlags);
16826 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
16827 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
16828 else
16829 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU16, pu16Dst, pEFlags);
16830
16831 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
16832 IEM_MC_COMMIT_EFLAGS(EFlags);
16833 IEM_MC_ADVANCE_RIP();
16834 IEM_MC_END();
16835 return VINF_SUCCESS;
16836
16837 case IEMMODE_32BIT:
16838 IEM_MC_BEGIN(2, 2);
16839 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
16840 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
16841 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
16842
16843 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
16844 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
16845 IEM_MC_FETCH_EFLAGS(EFlags);
16846 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
16847 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
16848 else
16849 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU32, pu32Dst, pEFlags);
16850
16851 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
16852 IEM_MC_COMMIT_EFLAGS(EFlags);
16853 IEM_MC_ADVANCE_RIP();
16854 IEM_MC_END();
16855 return VINF_SUCCESS;
16856
16857 case IEMMODE_64BIT:
16858 IEM_MC_BEGIN(2, 2);
16859 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
16860 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
16861 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
16862
16863 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
16864 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
16865 IEM_MC_FETCH_EFLAGS(EFlags);
16866 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
16867 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
16868 else
16869 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU64, pu64Dst, pEFlags);
16870
16871 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
16872 IEM_MC_COMMIT_EFLAGS(EFlags);
16873 IEM_MC_ADVANCE_RIP();
16874 IEM_MC_END();
16875 return VINF_SUCCESS;
16876
16877 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16878 }
16879}
16880
16881
/**
 * Opcode 0xf6 /0 - test Eb,Ib.
 *
 * ANDs the r/m8 operand with an immediate byte, updating eflags only; the
 * destination is never written (hence the read-only memory mapping below).
 */
FNIEMOP_DEF_1(iemOp_grp3_test_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC("test Eb,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
        IEM_MC_ARG_CONST(uint8_t,   u8Src,/*=*/u8Imm,   1);
        IEM_MC_ARG(uint32_t *,      pEFlags,            2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
        IEM_MC_ARG(uint8_t,         u8Src,              1);
        IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,    2);
        IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

        /* The 3rd argument (1) appears to be the number of immediate bytes
           following the ModR/M bytes - TODO confirm against the macro docs. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_ASSIGN(u8Src, u8Imm);
        /* TEST only reads the destination, so map it read-only. */
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_R, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_R);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
16929
16930
/**
 * Opcode 0xf7 /0 - test Ev,Iv.
 *
 * ANDs the r/m16/32/64 operand with an immediate (sign-extended dword in
 * 64-bit mode), updating eflags only; the destination is never written, so
 * memory operands are mapped read-only.
 */
FNIEMOP_DEF_1(iemOp_grp3_test_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC("test Ev,Iv");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                0);
                IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/u16Imm,     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                0);
                IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/u32Imm,     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
                /* No clearing the high dword here - test doesn't write back the result. */
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                /* Immediate is a sign-extended dword, per the instruction encoding. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                0);
                IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/u64Imm,     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,      pu16Dst,            0);
                IEM_MC_ARG(uint16_t,        u16Src,             1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,    2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                /* 3rd argument = number of trailing immediate bytes (2 here). */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_R, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,      pu32Dst,            0);
                IEM_MC_ARG(uint32_t,        u32Src,             1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,    2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_R, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,      pu64Dst,            0);
                IEM_MC_ARG(uint64_t,        u64Src,             1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,    2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                /* Still 4 immediate bytes in 64-bit mode (sign-extended dword). */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_R, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
17067
17068
/**
 * Opcode 0xf6 /4, /5, /6 and /7 - mul/imul/div/idiv Eb.
 *
 * Common worker for the byte-sized multiply/divide group.  The implicit
 * operand is AX (referenced read/write); the r/m8 operand is only read.
 * A non-zero return from the assembly worker signals a \#DE (divide error).
 *
 * @param   bRm     The RM byte.
 * @param   pfnU8   The byte-sized assembly worker (mul/imul/div/idiv).
 */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEb, uint8_t, bRm, PFNIEMAIMPLMULDIVU8, pfnU8)
{
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        IEMOP_HLP_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_ARG(uint16_t *,      pu16AX,     0);
        IEM_MC_ARG(uint8_t,         u8Value,    1);
        IEM_MC_ARG(uint32_t *,      pEFlags,    2);
        IEM_MC_LOCAL(int32_t,       rc);

        IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        /* rc != 0 means the worker detected a divide error (overflow / div by zero). */
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *,      pu16AX,     0);
        IEM_MC_ARG(uint8_t,         u8Value,    1);
        IEM_MC_ARG(uint32_t *,      pEFlags,    2);
        IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
        IEM_MC_LOCAL(int32_t,       rc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_MEM_U8(u8Value, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
17123
17124
/**
 * Opcode 0xf7 /4, /5, /6 and /7 - mul/imul/div/idiv Ev.
 *
 * Common worker for the word/dword/qword multiply/divide group.  The implicit
 * operands are xAX and xDX (both referenced read/write); the r/m operand is
 * only read.  A non-zero return from the assembly worker signals a \#DE.
 *
 * @param   bRm     The RM byte.
 * @param   pImpl   The size-specific assembly workers (mul/imul/div/idiv).
 */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEv, uint8_t, bRm, PCIEMOPMULDIVSIZES, pImpl)
{
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint16_t *,      pu16AX,     0);
                IEM_MC_ARG(uint16_t *,      pu16DX,     1);
                IEM_MC_ARG(uint16_t,        u16Value,   2);
                IEM_MC_ARG(uint32_t *,      pEFlags,    3);
                IEM_MC_LOCAL(int32_t,       rc);

                IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
                /* rc != 0 means the worker detected a divide error. */
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint32_t *,      pu32AX,     0);
                IEM_MC_ARG(uint32_t *,      pu32DX,     1);
                IEM_MC_ARG(uint32_t,        u32Value,   2);
                IEM_MC_ARG(uint32_t *,      pEFlags,    3);
                IEM_MC_LOCAL(int32_t,       rc);

                IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    /* 32-bit writes zero the high halves of RAX/RDX (64-bit mode semantics). */
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint64_t *,      pu64AX,     0);
                IEM_MC_ARG(uint64_t *,      pu64DX,     1);
                IEM_MC_ARG(uint64_t,        u64Value,   2);
                IEM_MC_ARG(uint32_t *,      pEFlags,    3);
                IEM_MC_LOCAL(int32_t,       rc);

                IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint16_t *,      pu16AX,     0);
                IEM_MC_ARG(uint16_t *,      pu16DX,     1);
                IEM_MC_ARG(uint16_t,        u16Value,   2);
                IEM_MC_ARG(uint32_t *,      pEFlags,    3);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
                IEM_MC_LOCAL(int32_t,       rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint32_t *,      pu32AX,     0);
                IEM_MC_ARG(uint32_t *,      pu32DX,     1);
                IEM_MC_ARG(uint32_t,        u32Value,   2);
                IEM_MC_ARG(uint32_t *,      pEFlags,    3);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
                IEM_MC_LOCAL(int32_t,       rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint64_t *,      pu64AX,     0);
                IEM_MC_ARG(uint64_t *,      pu64DX,     1);
                IEM_MC_ARG(uint64_t,        u64Value,   2);
                IEM_MC_ARG(uint32_t *,      pEFlags,    3);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
                IEM_MC_LOCAL(int32_t,       rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
17308
/**
 * Opcode 0xf6 - Group 3, byte operands.
 *
 * Dispatches on the /reg field of the ModR/M byte: /0 test, /1 invalid,
 * /2 not, /3 neg, /4 mul, /5 imul, /6 div, /7 idiv.
 */
FNIEMOP_DEF(iemOp_Grp3_Eb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
        case 1:
/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2:
            IEMOP_MNEMONIC("not Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_not);
        case 3:
            IEMOP_MNEMONIC("neg Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_neg);
        case 4:
            IEMOP_MNEMONIC("mul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_mul_u8);
        case 5:
            IEMOP_MNEMONIC("imul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_imul_u8);
        case 6:
            IEMOP_MNEMONIC("div Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_div_u8);
        case 7:
            IEMOP_MNEMONIC("idiv Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_idiv_u8);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
17345
17346
/**
 * Opcode 0xf7 - Group 3, word/dword/qword operands.
 *
 * Dispatches on the /reg field of the ModR/M byte: /0 test, /1 invalid,
 * /2 not, /3 neg, /4 mul, /5 imul, /6 div, /7 idiv.
 */
FNIEMOP_DEF(iemOp_Grp3_Ev)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
        case 1:
/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2:
            IEMOP_MNEMONIC("not Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_not);
        case 3:
            IEMOP_MNEMONIC("neg Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_neg);
        case 4:
            IEMOP_MNEMONIC("mul Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_mul);
        case 5:
            IEMOP_MNEMONIC("imul Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_imul);
        case 6:
            IEMOP_MNEMONIC("div Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_div);
        case 7:
            IEMOP_MNEMONIC("idiv Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_idiv);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
17383
17384
/** Opcode 0xf8 - clc (clear carry flag). */
FNIEMOP_DEF(iemOp_clc)
{
    IEMOP_MNEMONIC("clc");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
17396
17397
/** Opcode 0xf9 - stc (set carry flag). */
FNIEMOP_DEF(iemOp_stc)
{
    IEMOP_MNEMONIC("stc");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_SET_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
17409
17410
/** Opcode 0xfa - cli (clear interrupt flag; privilege checks done in the C worker). */
FNIEMOP_DEF(iemOp_cli)
{
    IEMOP_MNEMONIC("cli");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cli);
}
17418
17419
/** Opcode 0xfb - sti (set interrupt flag; privilege checks done in the C worker). */
FNIEMOP_DEF(iemOp_sti)
{
    IEMOP_MNEMONIC("sti");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sti);
}
17426
17427
/** Opcode 0xfc - cld (clear direction flag). */
FNIEMOP_DEF(iemOp_cld)
{
    IEMOP_MNEMONIC("cld");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
17439
17440
/** Opcode 0xfd - std (set direction flag). */
FNIEMOP_DEF(iemOp_std)
{
    IEMOP_MNEMONIC("std");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_SET_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
17452
17453
17454/** Opcode 0xfe. */
17455FNIEMOP_DEF(iemOp_Grp4)
17456{
17457 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
17458 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
17459 {
17460 case 0:
17461 IEMOP_MNEMONIC("inc Ev");
17462 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_inc);
17463 case 1:
17464 IEMOP_MNEMONIC("dec Ev");
17465 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_dec);
17466 default:
17467 IEMOP_MNEMONIC("grp4-ud");
17468 return IEMOP_RAISE_INVALID_OPCODE();
17469 }
17470}
17471
17472
17473/**
17474 * Opcode 0xff /2.
17475 * @param bRm The RM byte.
17476 */
17477FNIEMOP_DEF_1(iemOp_Grp5_calln_Ev, uint8_t, bRm)
17478{
17479 IEMOP_MNEMONIC("calln Ev");
17480 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo Too early? */
17481 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
17482
17483 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
17484 {
17485 /* The new RIP is taken from a register. */
17486 switch (pIemCpu->enmEffOpSize)
17487 {
17488 case IEMMODE_16BIT:
17489 IEM_MC_BEGIN(1, 0);
17490 IEM_MC_ARG(uint16_t, u16Target, 0);
17491 IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
17492 IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
17493 IEM_MC_END()
17494 return VINF_SUCCESS;
17495
17496 case IEMMODE_32BIT:
17497 IEM_MC_BEGIN(1, 0);
17498 IEM_MC_ARG(uint32_t, u32Target, 0);
17499 IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
17500 IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
17501 IEM_MC_END()
17502 return VINF_SUCCESS;
17503
17504 case IEMMODE_64BIT:
17505 IEM_MC_BEGIN(1, 0);
17506 IEM_MC_ARG(uint64_t, u64Target, 0);
17507 IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
17508 IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
17509 IEM_MC_END()
17510 return VINF_SUCCESS;
17511
17512 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17513 }
17514 }
17515 else
17516 {
17517 /* The new RIP is taken from a register. */
17518 switch (pIemCpu->enmEffOpSize)
17519 {
17520 case IEMMODE_16BIT:
17521 IEM_MC_BEGIN(1, 1);
17522 IEM_MC_ARG(uint16_t, u16Target, 0);
17523 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17524 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17525 IEM_MC_FETCH_MEM_U16(u16Target, pIemCpu->iEffSeg, GCPtrEffSrc);
17526 IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
17527 IEM_MC_END()
17528 return VINF_SUCCESS;
17529
17530 case IEMMODE_32BIT:
17531 IEM_MC_BEGIN(1, 1);
17532 IEM_MC_ARG(uint32_t, u32Target, 0);
17533 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17534 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17535 IEM_MC_FETCH_MEM_U32(u32Target, pIemCpu->iEffSeg, GCPtrEffSrc);
17536 IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
17537 IEM_MC_END()
17538 return VINF_SUCCESS;
17539
17540 case IEMMODE_64BIT:
17541 IEM_MC_BEGIN(1, 1);
17542 IEM_MC_ARG(uint64_t, u64Target, 0);
17543 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17544 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17545 IEM_MC_FETCH_MEM_U64(u64Target, pIemCpu->iEffSeg, GCPtrEffSrc);
17546 IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
17547 IEM_MC_END()
17548 return VINF_SUCCESS;
17549
17550 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17551 }
17552 }
17553}
17554
/** C-implementation signature shared by far call (iemCImpl_callf) and far
 *  jump (iemCImpl_FarJmp) workers: selector, offset and operand size. */
typedef IEM_CIMPL_DECL_TYPE_3(FNIEMCIMPLFARBRANCH, uint16_t, uSel, uint64_t, offSeg, IEMMODE, enmOpSize);
17556
17557FNIEMOP_DEF_2(iemOpHlp_Grp5_far_Ep, uint8_t, bRm, FNIEMCIMPLFARBRANCH *, pfnCImpl)
17558{
17559 /* Registers? How?? */
17560 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
17561 return IEMOP_RAISE_INVALID_OPCODE(); /* callf eax is not legal */
17562
17563 /* Far pointer loaded from memory. */
17564 switch (pIemCpu->enmEffOpSize)
17565 {
17566 case IEMMODE_16BIT:
17567 IEM_MC_BEGIN(3, 1);
17568 IEM_MC_ARG(uint16_t, u16Sel, 0);
17569 IEM_MC_ARG(uint16_t, offSeg, 1);
17570 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
17571 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17572 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17573 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17574 IEM_MC_FETCH_MEM_U16(offSeg, pIemCpu->iEffSeg, GCPtrEffSrc);
17575 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc, 2);
17576 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
17577 IEM_MC_END();
17578 return VINF_SUCCESS;
17579
17580 case IEMMODE_64BIT:
17581 /** @todo testcase: AMD does not seem to believe in the case (see bs-cpu-xcpt-1)
17582 * and will apparently ignore REX.W, at least for the jmp far qword [rsp]
17583 * and call far qword [rsp] encodings. */
17584 if (!IEM_IS_GUEST_CPU_AMD(pIemCpu))
17585 {
17586 IEM_MC_BEGIN(3, 1);
17587 IEM_MC_ARG(uint16_t, u16Sel, 0);
17588 IEM_MC_ARG(uint64_t, offSeg, 1);
17589 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
17590 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17591 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17592 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17593 IEM_MC_FETCH_MEM_U64(offSeg, pIemCpu->iEffSeg, GCPtrEffSrc);
17594 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc, 8);
17595 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
17596 IEM_MC_END();
17597 return VINF_SUCCESS;
17598 }
17599 /* AMD falls thru. */
17600
17601 case IEMMODE_32BIT:
17602 IEM_MC_BEGIN(3, 1);
17603 IEM_MC_ARG(uint16_t, u16Sel, 0);
17604 IEM_MC_ARG(uint32_t, offSeg, 1);
17605 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_32BIT, 2);
17606 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17607 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17608 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17609 IEM_MC_FETCH_MEM_U32(offSeg, pIemCpu->iEffSeg, GCPtrEffSrc);
17610 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc, 4);
17611 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
17612 IEM_MC_END();
17613 return VINF_SUCCESS;
17614
17615 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17616 }
17617}
17618
17619
17620/**
17621 * Opcode 0xff /3.
17622 * @param bRm The RM byte.
17623 */
17624FNIEMOP_DEF_1(iemOp_Grp5_callf_Ep, uint8_t, bRm)
17625{
17626 IEMOP_MNEMONIC("callf Ep");
17627 return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_callf);
17628}
17629
17630
17631/**
17632 * Opcode 0xff /4.
17633 * @param bRm The RM byte.
17634 */
17635FNIEMOP_DEF_1(iemOp_Grp5_jmpn_Ev, uint8_t, bRm)
17636{
17637 IEMOP_MNEMONIC("jmpn Ev");
17638 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo Too early? */
17639 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
17640
17641 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
17642 {
17643 /* The new RIP is taken from a register. */
17644 switch (pIemCpu->enmEffOpSize)
17645 {
17646 case IEMMODE_16BIT:
17647 IEM_MC_BEGIN(0, 1);
17648 IEM_MC_LOCAL(uint16_t, u16Target);
17649 IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
17650 IEM_MC_SET_RIP_U16(u16Target);
17651 IEM_MC_END()
17652 return VINF_SUCCESS;
17653
17654 case IEMMODE_32BIT:
17655 IEM_MC_BEGIN(0, 1);
17656 IEM_MC_LOCAL(uint32_t, u32Target);
17657 IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
17658 IEM_MC_SET_RIP_U32(u32Target);
17659 IEM_MC_END()
17660 return VINF_SUCCESS;
17661
17662 case IEMMODE_64BIT:
17663 IEM_MC_BEGIN(0, 1);
17664 IEM_MC_LOCAL(uint64_t, u64Target);
17665 IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
17666 IEM_MC_SET_RIP_U64(u64Target);
17667 IEM_MC_END()
17668 return VINF_SUCCESS;
17669
17670 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17671 }
17672 }
17673 else
17674 {
17675 /* The new RIP is taken from a memory location. */
17676 switch (pIemCpu->enmEffOpSize)
17677 {
17678 case IEMMODE_16BIT:
17679 IEM_MC_BEGIN(0, 2);
17680 IEM_MC_LOCAL(uint16_t, u16Target);
17681 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17682 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17683 IEM_MC_FETCH_MEM_U16(u16Target, pIemCpu->iEffSeg, GCPtrEffSrc);
17684 IEM_MC_SET_RIP_U16(u16Target);
17685 IEM_MC_END()
17686 return VINF_SUCCESS;
17687
17688 case IEMMODE_32BIT:
17689 IEM_MC_BEGIN(0, 2);
17690 IEM_MC_LOCAL(uint32_t, u32Target);
17691 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17692 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17693 IEM_MC_FETCH_MEM_U32(u32Target, pIemCpu->iEffSeg, GCPtrEffSrc);
17694 IEM_MC_SET_RIP_U32(u32Target);
17695 IEM_MC_END()
17696 return VINF_SUCCESS;
17697
17698 case IEMMODE_64BIT:
17699 IEM_MC_BEGIN(0, 2);
17700 IEM_MC_LOCAL(uint64_t, u64Target);
17701 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17702 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17703 IEM_MC_FETCH_MEM_U64(u64Target, pIemCpu->iEffSeg, GCPtrEffSrc);
17704 IEM_MC_SET_RIP_U64(u64Target);
17705 IEM_MC_END()
17706 return VINF_SUCCESS;
17707
17708 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17709 }
17710 }
17711}
17712
17713
17714/**
17715 * Opcode 0xff /5.
17716 * @param bRm The RM byte.
17717 */
17718FNIEMOP_DEF_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm)
17719{
17720 IEMOP_MNEMONIC("jmpf Ep");
17721 return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_FarJmp);
17722}
17723
17724
17725/**
17726 * Opcode 0xff /6.
17727 * @param bRm The RM byte.
17728 */
17729FNIEMOP_DEF_1(iemOp_Grp5_push_Ev, uint8_t, bRm)
17730{
17731 IEMOP_MNEMONIC("push Ev");
17732 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo Too early? */
17733
17734 /* Registers are handled by a common worker. */
17735 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
17736 return FNIEMOP_CALL_1(iemOpCommonPushGReg, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
17737
17738 /* Memory we do here. */
17739 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
17740 switch (pIemCpu->enmEffOpSize)
17741 {
17742 case IEMMODE_16BIT:
17743 IEM_MC_BEGIN(0, 2);
17744 IEM_MC_LOCAL(uint16_t, u16Src);
17745 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17746 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17747 IEM_MC_FETCH_MEM_U16(u16Src, pIemCpu->iEffSeg, GCPtrEffSrc);
17748 IEM_MC_PUSH_U16(u16Src);
17749 IEM_MC_ADVANCE_RIP();
17750 IEM_MC_END();
17751 return VINF_SUCCESS;
17752
17753 case IEMMODE_32BIT:
17754 IEM_MC_BEGIN(0, 2);
17755 IEM_MC_LOCAL(uint32_t, u32Src);
17756 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17757 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17758 IEM_MC_FETCH_MEM_U32(u32Src, pIemCpu->iEffSeg, GCPtrEffSrc);
17759 IEM_MC_PUSH_U32(u32Src);
17760 IEM_MC_ADVANCE_RIP();
17761 IEM_MC_END();
17762 return VINF_SUCCESS;
17763
17764 case IEMMODE_64BIT:
17765 IEM_MC_BEGIN(0, 2);
17766 IEM_MC_LOCAL(uint64_t, u64Src);
17767 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17768 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17769 IEM_MC_FETCH_MEM_U64(u64Src, pIemCpu->iEffSeg, GCPtrEffSrc);
17770 IEM_MC_PUSH_U64(u64Src);
17771 IEM_MC_ADVANCE_RIP();
17772 IEM_MC_END();
17773 return VINF_SUCCESS;
17774
17775 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17776 }
17777}
17778
17779
/**
 * Opcode 0xff - Group 5.
 *
 * Dispatches on the /reg field: /0 inc, /1 dec, /2 calln, /3 callf,
 * /4 jmpn, /5 jmpf, /6 push, /7 invalid.
 */
FNIEMOP_DEF(iemOp_Grp5)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            IEMOP_MNEMONIC("inc Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_inc);
        case 1:
            IEMOP_MNEMONIC("dec Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_dec);
        case 2:
            return FNIEMOP_CALL_1(iemOp_Grp5_calln_Ev, bRm);
        case 3:
            return FNIEMOP_CALL_1(iemOp_Grp5_callf_Ep, bRm);
        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp5_jmpn_Ev, bRm);
        case 5:
            return FNIEMOP_CALL_1(iemOp_Grp5_jmpf_Ep, bRm);
        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp5_push_Ev, bRm);
        case 7:
            IEMOP_MNEMONIC("grp5-ud");
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    AssertFailedReturn(VERR_IEM_IPE_3);
}
17808
17809
17810
17811const PFNIEMOP g_apfnOneByteMap[256] =
17812{
17813 /* 0x00 */ iemOp_add_Eb_Gb, iemOp_add_Ev_Gv, iemOp_add_Gb_Eb, iemOp_add_Gv_Ev,
17814 /* 0x04 */ iemOp_add_Al_Ib, iemOp_add_eAX_Iz, iemOp_push_ES, iemOp_pop_ES,
17815 /* 0x08 */ iemOp_or_Eb_Gb, iemOp_or_Ev_Gv, iemOp_or_Gb_Eb, iemOp_or_Gv_Ev,
17816 /* 0x0c */ iemOp_or_Al_Ib, iemOp_or_eAX_Iz, iemOp_push_CS, iemOp_2byteEscape,
17817 /* 0x10 */ iemOp_adc_Eb_Gb, iemOp_adc_Ev_Gv, iemOp_adc_Gb_Eb, iemOp_adc_Gv_Ev,
17818 /* 0x14 */ iemOp_adc_Al_Ib, iemOp_adc_eAX_Iz, iemOp_push_SS, iemOp_pop_SS,
17819 /* 0x18 */ iemOp_sbb_Eb_Gb, iemOp_sbb_Ev_Gv, iemOp_sbb_Gb_Eb, iemOp_sbb_Gv_Ev,
17820 /* 0x1c */ iemOp_sbb_Al_Ib, iemOp_sbb_eAX_Iz, iemOp_push_DS, iemOp_pop_DS,
17821 /* 0x20 */ iemOp_and_Eb_Gb, iemOp_and_Ev_Gv, iemOp_and_Gb_Eb, iemOp_and_Gv_Ev,
17822 /* 0x24 */ iemOp_and_Al_Ib, iemOp_and_eAX_Iz, iemOp_seg_ES, iemOp_daa,
17823 /* 0x28 */ iemOp_sub_Eb_Gb, iemOp_sub_Ev_Gv, iemOp_sub_Gb_Eb, iemOp_sub_Gv_Ev,
17824 /* 0x2c */ iemOp_sub_Al_Ib, iemOp_sub_eAX_Iz, iemOp_seg_CS, iemOp_das,
17825 /* 0x30 */ iemOp_xor_Eb_Gb, iemOp_xor_Ev_Gv, iemOp_xor_Gb_Eb, iemOp_xor_Gv_Ev,
17826 /* 0x34 */ iemOp_xor_Al_Ib, iemOp_xor_eAX_Iz, iemOp_seg_SS, iemOp_aaa,
17827 /* 0x38 */ iemOp_cmp_Eb_Gb, iemOp_cmp_Ev_Gv, iemOp_cmp_Gb_Eb, iemOp_cmp_Gv_Ev,
17828 /* 0x3c */ iemOp_cmp_Al_Ib, iemOp_cmp_eAX_Iz, iemOp_seg_DS, iemOp_aas,
17829 /* 0x40 */ iemOp_inc_eAX, iemOp_inc_eCX, iemOp_inc_eDX, iemOp_inc_eBX,
17830 /* 0x44 */ iemOp_inc_eSP, iemOp_inc_eBP, iemOp_inc_eSI, iemOp_inc_eDI,
17831 /* 0x48 */ iemOp_dec_eAX, iemOp_dec_eCX, iemOp_dec_eDX, iemOp_dec_eBX,
17832 /* 0x4c */ iemOp_dec_eSP, iemOp_dec_eBP, iemOp_dec_eSI, iemOp_dec_eDI,
17833 /* 0x50 */ iemOp_push_eAX, iemOp_push_eCX, iemOp_push_eDX, iemOp_push_eBX,
17834 /* 0x54 */ iemOp_push_eSP, iemOp_push_eBP, iemOp_push_eSI, iemOp_push_eDI,
17835 /* 0x58 */ iemOp_pop_eAX, iemOp_pop_eCX, iemOp_pop_eDX, iemOp_pop_eBX,
17836 /* 0x5c */ iemOp_pop_eSP, iemOp_pop_eBP, iemOp_pop_eSI, iemOp_pop_eDI,
17837 /* 0x60 */ iemOp_pusha, iemOp_popa, iemOp_bound_Gv_Ma_evex, iemOp_arpl_Ew_Gw_movsx_Gv_Ev,
17838 /* 0x64 */ iemOp_seg_FS, iemOp_seg_GS, iemOp_op_size, iemOp_addr_size,
17839 /* 0x68 */ iemOp_push_Iz, iemOp_imul_Gv_Ev_Iz, iemOp_push_Ib, iemOp_imul_Gv_Ev_Ib,
17840 /* 0x6c */ iemOp_insb_Yb_DX, iemOp_inswd_Yv_DX, iemOp_outsb_Yb_DX, iemOp_outswd_Yv_DX,
17841 /* 0x70 */ iemOp_jo_Jb, iemOp_jno_Jb, iemOp_jc_Jb, iemOp_jnc_Jb,
17842 /* 0x74 */ iemOp_je_Jb, iemOp_jne_Jb, iemOp_jbe_Jb, iemOp_jnbe_Jb,
17843 /* 0x78 */ iemOp_js_Jb, iemOp_jns_Jb, iemOp_jp_Jb, iemOp_jnp_Jb,
17844 /* 0x7c */ iemOp_jl_Jb, iemOp_jnl_Jb, iemOp_jle_Jb, iemOp_jnle_Jb,
17845 /* 0x80 */ iemOp_Grp1_Eb_Ib_80, iemOp_Grp1_Ev_Iz, iemOp_Grp1_Eb_Ib_82, iemOp_Grp1_Ev_Ib,
17846 /* 0x84 */ iemOp_test_Eb_Gb, iemOp_test_Ev_Gv, iemOp_xchg_Eb_Gb, iemOp_xchg_Ev_Gv,
17847 /* 0x88 */ iemOp_mov_Eb_Gb, iemOp_mov_Ev_Gv, iemOp_mov_Gb_Eb, iemOp_mov_Gv_Ev,
17848 /* 0x8c */ iemOp_mov_Ev_Sw, iemOp_lea_Gv_M, iemOp_mov_Sw_Ev, iemOp_Grp1A,
17849 /* 0x90 */ iemOp_nop, iemOp_xchg_eCX_eAX, iemOp_xchg_eDX_eAX, iemOp_xchg_eBX_eAX,
17850 /* 0x94 */ iemOp_xchg_eSP_eAX, iemOp_xchg_eBP_eAX, iemOp_xchg_eSI_eAX, iemOp_xchg_eDI_eAX,
17851 /* 0x98 */ iemOp_cbw, iemOp_cwd, iemOp_call_Ap, iemOp_wait,
17852 /* 0x9c */ iemOp_pushf_Fv, iemOp_popf_Fv, iemOp_sahf, iemOp_lahf,
17853 /* 0xa0 */ iemOp_mov_Al_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL, iemOp_mov_Ov_rAX,
17854 /* 0xa4 */ iemOp_movsb_Xb_Yb, iemOp_movswd_Xv_Yv, iemOp_cmpsb_Xb_Yb, iemOp_cmpswd_Xv_Yv,
17855 /* 0xa8 */ iemOp_test_AL_Ib, iemOp_test_eAX_Iz, iemOp_stosb_Yb_AL, iemOp_stoswd_Yv_eAX,
17856 /* 0xac */ iemOp_lodsb_AL_Xb, iemOp_lodswd_eAX_Xv, iemOp_scasb_AL_Xb, iemOp_scaswd_eAX_Xv,
17857 /* 0xb0 */ iemOp_mov_AL_Ib, iemOp_CL_Ib, iemOp_DL_Ib, iemOp_BL_Ib,
17858 /* 0xb4 */ iemOp_mov_AH_Ib, iemOp_CH_Ib, iemOp_DH_Ib, iemOp_BH_Ib,
17859 /* 0xb8 */ iemOp_eAX_Iv, iemOp_eCX_Iv, iemOp_eDX_Iv, iemOp_eBX_Iv,
17860 /* 0xbc */ iemOp_eSP_Iv, iemOp_eBP_Iv, iemOp_eSI_Iv, iemOp_eDI_Iv,
17861 /* 0xc0 */ iemOp_Grp2_Eb_Ib, iemOp_Grp2_Ev_Ib, iemOp_retn_Iw, iemOp_retn,
17862 /* 0xc4 */ iemOp_les_Gv_Mp_vex2, iemOp_lds_Gv_Mp_vex3, iemOp_Grp11_Eb_Ib, iemOp_Grp11_Ev_Iz,
17863 /* 0xc8 */ iemOp_enter_Iw_Ib, iemOp_leave, iemOp_retf_Iw, iemOp_retf,
17864 /* 0xcc */ iemOp_int_3, iemOp_int_Ib, iemOp_into, iemOp_iret,
17865 /* 0xd0 */ iemOp_Grp2_Eb_1, iemOp_Grp2_Ev_1, iemOp_Grp2_Eb_CL, iemOp_Grp2_Ev_CL,
17866 /* 0xd4 */ iemOp_aam_Ib, iemOp_aad_Ib, iemOp_salc, iemOp_xlat,
17867 /* 0xd8 */ iemOp_EscF0, iemOp_EscF1, iemOp_EscF2, iemOp_EscF3,
17868 /* 0xdc */ iemOp_EscF4, iemOp_EscF5, iemOp_EscF6, iemOp_EscF7,
17869 /* 0xe0 */ iemOp_loopne_Jb, iemOp_loope_Jb, iemOp_loop_Jb, iemOp_jecxz_Jb,
17870 /* 0xe4 */ iemOp_in_AL_Ib, iemOp_in_eAX_Ib, iemOp_out_Ib_AL, iemOp_out_Ib_eAX,
17871 /* 0xe8 */ iemOp_call_Jv, iemOp_jmp_Jv, iemOp_jmp_Ap, iemOp_jmp_Jb,
17872 /* 0xec */ iemOp_in_AL_DX, iemOp_eAX_DX, iemOp_out_DX_AL, iemOp_out_DX_eAX,
17873 /* 0xf0 */ iemOp_lock, iemOp_int_1, iemOp_repne, iemOp_repe,
17874 /* 0xf4 */ iemOp_hlt, iemOp_cmc, iemOp_Grp3_Eb, iemOp_Grp3_Ev,
17875 /* 0xf8 */ iemOp_clc, iemOp_stc, iemOp_cli, iemOp_sti,
17876 /* 0xfc */ iemOp_cld, iemOp_std, iemOp_Grp4, iemOp_Grp5,
17877};
17878
17879
17880/** @} */
17881
Note: See TracBrowser for help on using the repository browser.

© 2025 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette