VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructions.cpp.h@ 61064

Last change on this file since 61064 was 61064, checked in by vboxsync, 9 years ago

IEM: FPU & SSE ring-0/raw-mode usage preps.

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 605.4 KB
Line 
1/* $Id: IEMAllInstructions.cpp.h 61064 2016-05-19 22:20:40Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 */
5
6/*
7 * Copyright (C) 2011-2015 Oracle Corporation
8 *
9 * This file is part of VirtualBox Open Source Edition (OSE), as
10 * available from http://www.215389.xyz. This file is free software;
11 * you can redistribute it and/or modify it under the terms of the GNU
12 * General Public License (GPL) as published by the Free Software
13 * Foundation, in version 2 as it comes in the "COPYING" file of the
14 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
15 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
16 */
17
18
19/*******************************************************************************
20* Global Variables *
21*******************************************************************************/
22extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */
23
24
/**
 * Common worker for instructions like ADD, AND, OR, ++ with a byte
 * memory/register as the destination.
 *
 * Decodes the ModR/M byte itself.  The register form calls pfnNormalU8
 * directly; the memory form maps the destination and selects the locked
 * implementation when a LOCK prefix is present.
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rm_r8, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* LOCK with a register destination is invalid (#UD). */
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0);
        IEM_MC_ARG(uint8_t,    u8Src,   1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         * Note! We're putting the eflags on the stack here so we can commit them
         *       after the memory.
         */
        /* Implementations without a locked variant (CMP, TEST) only read the
           destination, so map it read-only in that case. */
        uint32_t const fAccess = pImpl->pfnLockedU8 ? IEM_ACCESS_DATA_RW : IEM_ACCESS_DATA_R; /* CMP,TEST */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,  pu8Dst,           0);
        IEM_MC_ARG(uint8_t,    u8Src,            1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_FETCH_EFLAGS(EFlags);
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
85
86
87/**
88 * Common worker for word/dword/qword instructions like ADD, AND, OR, ++ with
89 * memory/register as the destination.
90 *
91 * @param pImpl Pointer to the instruction implementation (assembly).
92 */
93FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rm_rv, PCIEMOPBINSIZES, pImpl)
94{
95 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
96
97 /*
98 * If rm is denoting a register, no more instruction bytes.
99 */
100 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
101 {
102 IEMOP_HLP_NO_LOCK_PREFIX();
103
104 switch (pIemCpu->enmEffOpSize)
105 {
106 case IEMMODE_16BIT:
107 IEM_MC_BEGIN(3, 0);
108 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
109 IEM_MC_ARG(uint16_t, u16Src, 1);
110 IEM_MC_ARG(uint32_t *, pEFlags, 2);
111
112 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
113 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
114 IEM_MC_REF_EFLAGS(pEFlags);
115 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
116
117 IEM_MC_ADVANCE_RIP();
118 IEM_MC_END();
119 break;
120
121 case IEMMODE_32BIT:
122 IEM_MC_BEGIN(3, 0);
123 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
124 IEM_MC_ARG(uint32_t, u32Src, 1);
125 IEM_MC_ARG(uint32_t *, pEFlags, 2);
126
127 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
128 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
129 IEM_MC_REF_EFLAGS(pEFlags);
130 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
131
132 if (pImpl != &g_iemAImpl_test)
133 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
134 IEM_MC_ADVANCE_RIP();
135 IEM_MC_END();
136 break;
137
138 case IEMMODE_64BIT:
139 IEM_MC_BEGIN(3, 0);
140 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
141 IEM_MC_ARG(uint64_t, u64Src, 1);
142 IEM_MC_ARG(uint32_t *, pEFlags, 2);
143
144 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
145 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
146 IEM_MC_REF_EFLAGS(pEFlags);
147 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
148
149 IEM_MC_ADVANCE_RIP();
150 IEM_MC_END();
151 break;
152 }
153 }
154 else
155 {
156 /*
157 * We're accessing memory.
158 * Note! We're putting the eflags on the stack here so we can commit them
159 * after the memory.
160 */
161 uint32_t const fAccess = pImpl->pfnLockedU8 ? IEM_ACCESS_DATA_RW : IEM_ACCESS_DATA_R /* CMP,TEST */;
162 switch (pIemCpu->enmEffOpSize)
163 {
164 case IEMMODE_16BIT:
165 IEM_MC_BEGIN(3, 2);
166 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
167 IEM_MC_ARG(uint16_t, u16Src, 1);
168 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
169 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
170
171 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
172 IEM_MC_MEM_MAP(pu16Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
173 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
174 IEM_MC_FETCH_EFLAGS(EFlags);
175 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
176 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
177 else
178 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
179
180 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
181 IEM_MC_COMMIT_EFLAGS(EFlags);
182 IEM_MC_ADVANCE_RIP();
183 IEM_MC_END();
184 break;
185
186 case IEMMODE_32BIT:
187 IEM_MC_BEGIN(3, 2);
188 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
189 IEM_MC_ARG(uint32_t, u32Src, 1);
190 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
191 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
192
193 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
194 IEM_MC_MEM_MAP(pu32Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
195 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
196 IEM_MC_FETCH_EFLAGS(EFlags);
197 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
198 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
199 else
200 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
201
202 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
203 IEM_MC_COMMIT_EFLAGS(EFlags);
204 IEM_MC_ADVANCE_RIP();
205 IEM_MC_END();
206 break;
207
208 case IEMMODE_64BIT:
209 IEM_MC_BEGIN(3, 2);
210 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
211 IEM_MC_ARG(uint64_t, u64Src, 1);
212 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
213 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
214
215 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
216 IEM_MC_MEM_MAP(pu64Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
217 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
218 IEM_MC_FETCH_EFLAGS(EFlags);
219 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
220 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
221 else
222 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
223
224 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
225 IEM_MC_COMMIT_EFLAGS(EFlags);
226 IEM_MC_ADVANCE_RIP();
227 IEM_MC_END();
228 break;
229 }
230 }
231 return VINF_SUCCESS;
232}
233
234
/**
 * Common worker for byte instructions like ADD, AND, OR, ++ with a register as
 * the destination.
 *
 * Decodes the ModR/M byte itself.  Since the destination is always a
 * register, the LOCK prefix is rejected up front and the memory form only
 * needs a plain fetch of the source operand.
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_r8_rm, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0);
        IEM_MC_ARG(uint8_t,    u8Src,   1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_FETCH_GREG_U8(u8Src, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_GREG_U8(pu8Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
        IEM_MC_BEGIN(3, 1);
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0);
        IEM_MC_ARG(uint8_t,    u8Src,   1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_MEM_U8(u8Src, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U8(pu8Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
286
287
288/**
289 * Common worker for word/dword/qword instructions like ADD, AND, OR, ++ with a
290 * register as the destination.
291 *
292 * @param pImpl Pointer to the instruction implementation (assembly).
293 */
294FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rv_rm, PCIEMOPBINSIZES, pImpl)
295{
296 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
297 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
298
299 /*
300 * If rm is denoting a register, no more instruction bytes.
301 */
302 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
303 {
304 switch (pIemCpu->enmEffOpSize)
305 {
306 case IEMMODE_16BIT:
307 IEM_MC_BEGIN(3, 0);
308 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
309 IEM_MC_ARG(uint16_t, u16Src, 1);
310 IEM_MC_ARG(uint32_t *, pEFlags, 2);
311
312 IEM_MC_FETCH_GREG_U16(u16Src, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
313 IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
314 IEM_MC_REF_EFLAGS(pEFlags);
315 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
316
317 IEM_MC_ADVANCE_RIP();
318 IEM_MC_END();
319 break;
320
321 case IEMMODE_32BIT:
322 IEM_MC_BEGIN(3, 0);
323 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
324 IEM_MC_ARG(uint32_t, u32Src, 1);
325 IEM_MC_ARG(uint32_t *, pEFlags, 2);
326
327 IEM_MC_FETCH_GREG_U32(u32Src, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
328 IEM_MC_REF_GREG_U32(pu32Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
329 IEM_MC_REF_EFLAGS(pEFlags);
330 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
331
332 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
333 IEM_MC_ADVANCE_RIP();
334 IEM_MC_END();
335 break;
336
337 case IEMMODE_64BIT:
338 IEM_MC_BEGIN(3, 0);
339 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
340 IEM_MC_ARG(uint64_t, u64Src, 1);
341 IEM_MC_ARG(uint32_t *, pEFlags, 2);
342
343 IEM_MC_FETCH_GREG_U64(u64Src, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
344 IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
345 IEM_MC_REF_EFLAGS(pEFlags);
346 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
347
348 IEM_MC_ADVANCE_RIP();
349 IEM_MC_END();
350 break;
351 }
352 }
353 else
354 {
355 /*
356 * We're accessing memory.
357 */
358 switch (pIemCpu->enmEffOpSize)
359 {
360 case IEMMODE_16BIT:
361 IEM_MC_BEGIN(3, 1);
362 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
363 IEM_MC_ARG(uint16_t, u16Src, 1);
364 IEM_MC_ARG(uint32_t *, pEFlags, 2);
365 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
366
367 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
368 IEM_MC_FETCH_MEM_U16(u16Src, pIemCpu->iEffSeg, GCPtrEffDst);
369 IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
370 IEM_MC_REF_EFLAGS(pEFlags);
371 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
372
373 IEM_MC_ADVANCE_RIP();
374 IEM_MC_END();
375 break;
376
377 case IEMMODE_32BIT:
378 IEM_MC_BEGIN(3, 1);
379 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
380 IEM_MC_ARG(uint32_t, u32Src, 1);
381 IEM_MC_ARG(uint32_t *, pEFlags, 2);
382 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
383
384 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
385 IEM_MC_FETCH_MEM_U32(u32Src, pIemCpu->iEffSeg, GCPtrEffDst);
386 IEM_MC_REF_GREG_U32(pu32Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
387 IEM_MC_REF_EFLAGS(pEFlags);
388 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
389
390 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
391 IEM_MC_ADVANCE_RIP();
392 IEM_MC_END();
393 break;
394
395 case IEMMODE_64BIT:
396 IEM_MC_BEGIN(3, 1);
397 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
398 IEM_MC_ARG(uint64_t, u64Src, 1);
399 IEM_MC_ARG(uint32_t *, pEFlags, 2);
400 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
401
402 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
403 IEM_MC_FETCH_MEM_U64(u64Src, pIemCpu->iEffSeg, GCPtrEffDst);
404 IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
405 IEM_MC_REF_EFLAGS(pEFlags);
406 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
407
408 IEM_MC_ADVANCE_RIP();
409 IEM_MC_END();
410 break;
411 }
412 }
413 return VINF_SUCCESS;
414}
415
416
/**
 * Common worker for instructions like ADD, AND, OR, ++ with working on AL with
 * a byte immediate.
 *
 * Fetches the immediate itself; destination is always AL, so the LOCK
 * prefix is rejected.
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_AL_Ib, PCIEMOPBINSIZES, pImpl)
{
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
    IEM_MC_ARG_CONST(uint8_t,   u8Src,/*=*/ u8Imm,  1);
    IEM_MC_ARG(uint32_t *,      pEFlags,            2);

    IEM_MC_REF_GREG_U8(pu8Dst, X86_GREG_xAX);
    IEM_MC_REF_EFLAGS(pEFlags);
    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
441
442
/**
 * Common worker for instructions like ADD, AND, OR, ++ with working on
 * AX/EAX/RAX with a word/dword immediate.
 *
 * The immediate width follows the effective operand size; in 64-bit mode a
 * dword immediate is fetched and sign-extended to 64 bits, matching the
 * x86 Iz encoding.
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rAX_Iz, PCIEMOPBINSIZES, pImpl)
{
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint16_t *,      pu16Dst,                0);
            IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/ u16Imm,    1);
            IEM_MC_ARG(uint32_t *,      pEFlags,                2);

            IEM_MC_REF_GREG_U16(pu16Dst, X86_GREG_xAX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint32_t *,      pu32Dst,                0);
            IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/ u32Imm,    1);
            IEM_MC_ARG(uint32_t *,      pEFlags,                2);

            IEM_MC_REF_GREG_U32(pu32Dst, X86_GREG_xAX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

            /* TEST doesn't write its destination, so don't clear the high
               dword of RAX for it. */
            if (pImpl != &g_iemAImpl_test)
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            /* Iz in 64-bit mode: sign-extended 32-bit immediate. */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint64_t *,      pu64Dst,                0);
            IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/ u64Imm,    1);
            IEM_MC_ARG(uint32_t *,      pEFlags,                2);

            IEM_MC_REF_GREG_U64(pu64Dst, X86_GREG_xAX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
515
516
/** Opcodes 0xf1, 0xd6. Shared handler for invalid opcodes; raises \#UD. */
FNIEMOP_DEF(iemOp_Invalid)
{
    IEMOP_MNEMONIC("Invalid");
    return IEMOP_RAISE_INVALID_OPCODE();
}
523
524
525
526/** @name ..... opcodes.
527 *
528 * @{
529 */
530
531/** @} */
532
533
534/** @name Two byte opcodes (first byte 0x0f).
535 *
536 * @{
537 */
538
/** Opcode 0x0f 0x00 /0.  SLDT - store the LDT selector.
 *
 * Register form honours the operand size; the memory form always stores a
 * 16-bit selector regardless of operand size.  Not valid in real or V86
 * mode (min 286).
 */
FNIEMOP_DEF_1(iemOp_Grp6_sldt, uint8_t, bRm)
{
    IEMOP_MNEMONIC("sldt Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Ldtr);
                IEM_MC_FETCH_LDTR_U16(u16Ldtr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Ldtr);
                IEM_MC_FETCH_LDTR_U32(u32Ldtr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Ldtr);
                IEM_MC_FETCH_LDTR_U64(u64Ldtr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory destination: always a 16-bit store. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Ldtr);
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_LDTR_U16(u16Ldtr);
        IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Ldtr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
595
596
/** Opcode 0x0f 0x00 /1.  STR - store the task register selector.
 *
 * Same structure as SLDT: register form honours the operand size, memory
 * form always stores 16 bits.  Not valid in real or V86 mode (min 286).
 */
FNIEMOP_DEF_1(iemOp_Grp6_str, uint8_t, bRm)
{
    IEMOP_MNEMONIC("str Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Tr);
                IEM_MC_FETCH_TR_U16(u16Tr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Tr);
                IEM_MC_FETCH_TR_U32(u32Tr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tr);
                IEM_MC_FETCH_TR_U64(u64Tr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory destination: always a 16-bit store. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Tr);
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_TR_U16(u16Tr);
        IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Tr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
653
654
/** Opcode 0x0f 0x00 /2.  LLDT - load the LDT selector.
 *
 * Defers the actual work to iemCImpl_lldt.  Not valid in real or V86 mode
 * (min 286).
 */
FNIEMOP_DEF_1(iemOp_Grp6_lldt, uint8_t, bRm)
{
    IEMOP_MNEMONIC("lldt Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
685
686
/** Opcode 0x0f 0x00 /3.  LTR - load the task register.
 *
 * Defers the actual work to iemCImpl_ltr.  Not valid in real or V86 mode
 * (min 286).
 */
FNIEMOP_DEF_1(iemOp_Grp6_ltr, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ltr Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
717
718
719/** Opcode 0x0f 0x00 /3. */
720FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
721{
722 IEMOP_HLP_MIN_286();
723 IEMOP_HLP_NO_REAL_OR_V86_MODE();
724
725 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
726 {
727 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
728 IEM_MC_BEGIN(2, 0);
729 IEM_MC_ARG(uint16_t, u16Sel, 0);
730 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
731 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
732 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
733 IEM_MC_END();
734 }
735 else
736 {
737 IEM_MC_BEGIN(2, 1);
738 IEM_MC_ARG(uint16_t, u16Sel, 0);
739 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
740 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
741 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
742 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
743 IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc);
744 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
745 IEM_MC_END();
746 }
747 return VINF_SUCCESS;
748}
749
750
/** Opcode 0x0f 0x00 /4.  VERR - verify a segment for reading. */
FNIEMOP_DEF_1(iemOp_Grp6_verr, uint8_t, bRm)
{
    IEMOP_MNEMONIC("verr Ew");
    IEMOP_HLP_MIN_286();
    /* fWrite=false selects the VERR behaviour in the common worker. */
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
}
758
759
760/** Opcode 0x0f 0x00 /5. */
761FNIEMOP_DEF_1(iemOp_Grp6_verw, uint8_t, bRm)
762{
763 IEMOP_MNEMONIC("verr Ew");
764 IEMOP_HLP_MIN_286();
765 return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
766}
767
768
/** Opcode 0x0f 0x00.  Group 6 dispatcher - selects by the ModR/M reg field. */
FNIEMOP_DEF(iemOp_Grp6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* /6 and /7 are undefined encodings and raise #UD. */
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: return FNIEMOP_CALL_1(iemOp_Grp6_sldt, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_Grp6_str,  bRm);
        case 2: return FNIEMOP_CALL_1(iemOp_Grp6_lldt, bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_Grp6_ltr,  bRm);
        case 4: return FNIEMOP_CALL_1(iemOp_Grp6_verr, bRm);
        case 5: return FNIEMOP_CALL_1(iemOp_Grp6_verw, bRm);
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        case 7: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
787
788
/** Opcode 0x0f 0x01 /0.  SGDT - store the GDT register (memory form).
 *
 * Defers the store to iemCImpl_sgdt with the effective segment and address.
 */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC("sgdt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg,     0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
805
806
/** Opcode 0x0f 0x01 /0, mod=3 (VMX VMCALL).  Unimplemented stub - raises \#UD. */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
813
814
/** Opcode 0x0f 0x01 /0, mod=3 (VMX VMLAUNCH).  Unimplemented stub - raises \#UD. */
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
821
822
/** Opcode 0x0f 0x01 /0, mod=3 (VMX VMRESUME).  Unimplemented stub - raises \#UD. */
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
829
830
/** Opcode 0x0f 0x01 /0, mod=3 (VMX VMXOFF).  Unimplemented stub - raises \#UD. */
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
837
838
/** Opcode 0x0f 0x01 /1.  SIDT - store the IDT register (memory form).
 *
 * Defers the store to iemCImpl_sidt with the effective segment and address.
 */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC("sidt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg,     0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
855
856
/** Opcode 0x0f 0x01 /1, mod=3 (MONITOR).
 *
 * Deferred to iemCImpl_monitor; the effective segment is passed along for
 * resolving the monitored address.
 */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    IEMOP_MNEMONIC("monitor");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pIemCpu->iEffSeg);
}
864
865
/** Opcode 0x0f 0x01 /1, mod=3 (MWAIT).  Deferred to iemCImpl_mwait. */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    IEMOP_MNEMONIC("mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
}
873
874
/** Opcode 0x0f 0x01 /2.  LGDT - load the GDT register (memory form).
 *
 * Defers the load to iemCImpl_lgdt, passing the effective operand size so
 * the 24/32/64-bit base-width rules can be applied there.
 *
 * NOTE(review): unlike the sgdt/sidt handlers, no IEMOP_HLP_MIN_286() check
 * here - confirm whether that is intentional.
 */
FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC("lgdt");
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t,         iEffSeg,                                    0);
    IEM_MC_ARG(RTGCPTR,         GCPtrEffSrc,                                1);
    IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSizeArg,/*=*/pIemCpu->enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}
891
892
/** Opcode 0x0f 0x01 0xd0.  XGETBV - read an extended control register.
 *
 * Only valid when the guest CPU advertises XSAVE/XRSTOR; \#UD otherwise.
 */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    IEMOP_MNEMONIC("xgetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xgetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}
904
905
/** Opcode 0x0f 0x01 0xd1.  XSETBV - write an extended control register.
 *
 * Only valid when the guest CPU advertises XSAVE/XRSTOR; \#UD otherwise.
 */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    IEMOP_MNEMONIC("xsetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xsetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}
917
918
/** Opcode 0x0f 0x01 /3.  LIDT - load the IDT register (memory form).
 *
 * In 64-bit mode the effective operand size is forced to 64-bit; otherwise
 * the current effective operand size is passed to iemCImpl_lidt.
 *
 * NOTE(review): no IEMOP_MNEMONIC()/IEMOP_HLP_MIN_286() here, unlike the
 * sibling handlers - confirm whether that is intentional.
 */
FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
{
    IEMMODE enmEffOpSize = pIemCpu->enmCpuMode == IEMMODE_64BIT
                         ? IEMMODE_64BIT
                         : pIemCpu->enmEffOpSize;
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t,         iEffSeg,                            0);
    IEM_MC_ARG(RTGCPTR,         GCPtrEffSrc,                        1);
    IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSizeArg,/*=*/enmEffOpSize,  2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}
936
937
/*
 * AMD SVM instructions (0x0f 0x01, modrm 0xd8..0xdf) are not implemented;
 * the FNIEMOP_UD_STUB wrappers make them raise #UD.
 */

/** Opcode 0x0f 0x01 0xd8. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmmcall);

/** Opcode 0x0f 0x01 0xda. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);

/** Opcode 0x0f 0x01 0xdb. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);

/** Opcode 0x0f 0x01 0xdc. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);

/** Opcode 0x0f 0x01 0xdd. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);

/** Opcode 0x0f 0x01 0xde. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);

/** Opcode 0x0f 0x01 0xdf. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
961
962/** Opcode 0x0f 0x01 /4. */
963FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
964{
965 IEMOP_MNEMONIC("smsw");
966 IEMOP_HLP_MIN_286();
967 IEMOP_HLP_NO_LOCK_PREFIX();
968 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
969 {
970 switch (pIemCpu->enmEffOpSize)
971 {
972 case IEMMODE_16BIT:
973 IEM_MC_BEGIN(0, 1);
974 IEM_MC_LOCAL(uint16_t, u16Tmp);
975 IEM_MC_FETCH_CR0_U16(u16Tmp);
976 if (IEM_GET_TARGET_CPU(pIemCpu) > IEMTARGETCPU_386)
977 { /* likely */ }
978 else if (IEM_GET_TARGET_CPU(pIemCpu) >= IEMTARGETCPU_386)
979 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
980 else
981 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
982 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Tmp);
983 IEM_MC_ADVANCE_RIP();
984 IEM_MC_END();
985 return VINF_SUCCESS;
986
987 case IEMMODE_32BIT:
988 IEM_MC_BEGIN(0, 1);
989 IEM_MC_LOCAL(uint32_t, u32Tmp);
990 IEM_MC_FETCH_CR0_U32(u32Tmp);
991 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Tmp);
992 IEM_MC_ADVANCE_RIP();
993 IEM_MC_END();
994 return VINF_SUCCESS;
995
996 case IEMMODE_64BIT:
997 IEM_MC_BEGIN(0, 1);
998 IEM_MC_LOCAL(uint64_t, u64Tmp);
999 IEM_MC_FETCH_CR0_U64(u64Tmp);
1000 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Tmp);
1001 IEM_MC_ADVANCE_RIP();
1002 IEM_MC_END();
1003 return VINF_SUCCESS;
1004
1005 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1006 }
1007 }
1008 else
1009 {
1010 /* Ignore operand size here, memory refs are always 16-bit. */
1011 IEM_MC_BEGIN(0, 2);
1012 IEM_MC_LOCAL(uint16_t, u16Tmp);
1013 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
1014 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
1015 IEM_MC_FETCH_CR0_U16(u16Tmp);
1016 if (IEM_GET_TARGET_CPU(pIemCpu) > IEMTARGETCPU_386)
1017 { /* likely */ }
1018 else if (pIemCpu->uTargetCpu >= IEMTARGETCPU_386)
1019 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
1020 else
1021 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
1022 IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Tmp);
1023 IEM_MC_ADVANCE_RIP();
1024 IEM_MC_END();
1025 return VINF_SUCCESS;
1026 }
1027}
1028
1029
/** Opcode 0x0f 0x01 /6. */
FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
{
    /* The operand size is effectively ignored, all is 16-bit and only the
       lower 3-bits are used. */
    IEMOP_MNEMONIC("lmsw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_LOCK_PREFIX();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register source: the privilege/mode checks live in iemCImpl_lmsw. */
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    else
    {
        /* Memory source: always a 16-bit read regardless of operand size. */
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /7 (memory forms). */
FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
{
    /* Invalidate the TLB entry for the effective address; work done in iemCImpl_invlpg. */
    IEMOP_MNEMONIC("invlpg");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(1, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 0xf8 (/7 register form, rm=0). */
FNIEMOP_DEF(iemOp_Grp7_swapgs)
{
    /* 64-bit mode only; swaps GS base with MSR_KERNEL_GS_BASE (see iemCImpl_swapgs). */
    IEMOP_MNEMONIC("swapgs");
    IEMOP_HLP_ONLY_64BIT();
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
}


/** Opcode 0x0f 0x01 0xf9 (/7 register form, rm=1). */
FNIEMOP_DEF(iemOp_Grp7_rdtscp)
{
    /* Not implemented yet - returns the not-implemented status so callers can fall back. */
    NOREF(pIemCpu);
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}
1092
1093
/**
 * Opcode 0x0f 0x01 (group 7).
 *
 * Dispatches on the ModR/M reg field; for /0, /1, /2, /3 and /7 the register
 * (mod=3) encodings select separate instructions keyed on the rm field.
 */
FNIEMOP_DEF(iemOp_Grp7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: /* SGDT (mem) or VMX ops (reg). */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_sgdt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 1: /* SIDT (mem) or MONITOR/MWAIT (reg). */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_sidt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 2: /* LGDT (mem) or XGETBV/XSETBV (reg). */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_lgdt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 3: /* LIDT (mem) or AMD SVM ops (reg) - inner switch covers all rm values. */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_lidt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
                case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
                case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
                case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }

        case 4: /* SMSW - both register and memory forms. */
            return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);

        case 5: /* Reserved. */
            return IEMOP_RAISE_INVALID_OPCODE();

        case 6: /* LMSW - both register and memory forms. */
            return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);

        case 7: /* INVLPG (mem) or SWAPGS/RDTSCP (reg). */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_invlpg, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
1170
/**
 * Common worker for LAR (0x0f 0x02) and LSL (0x0f 0x03).
 *
 * 32-bit and 64-bit operand sizes share the 64-bit path (both call
 * iemCImpl_LarLsl_u64); the memory forms always do a 16-bit selector read.
 *
 * @param fIsLar    true for LAR, false for LSL - forwarded to the CIMPL worker.
 */
FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
{
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 3);

                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_CIMPL_4(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, pEFlags, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 3);

                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_CIMPL_4(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, pEFlags, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);

                IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_CIMPL_4(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, pEFlags, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
/** @todo testcase: make sure it's a 16-bit read. */

                IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_CIMPL_4(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, pEFlags, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
1272
1273
1274
/** Opcode 0x0f 0x02. */
FNIEMOP_DEF(iemOp_lar_Gv_Ew)
{
    /* LAR - load access rights; shares the worker with LSL (fIsLar=true). */
    IEMOP_MNEMONIC("lar Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
}


/** Opcode 0x0f 0x03. */
FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
{
    /* LSL - load segment limit; shares the worker with LAR (fIsLar=false). */
    IEMOP_MNEMONIC("lsl Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
}


/** Opcode 0x0f 0x05. */
FNIEMOP_DEF(iemOp_syscall)
{
    IEMOP_MNEMONIC("syscall"); /** @todo 286 LOADALL */
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
}


/** Opcode 0x0f 0x06. */
FNIEMOP_DEF(iemOp_clts)
{
    /* Clear the task-switched flag in CR0; checks done in iemCImpl_clts. */
    IEMOP_MNEMONIC("clts");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
}


/** Opcode 0x0f 0x07. */
FNIEMOP_DEF(iemOp_sysret)
{
    IEMOP_MNEMONIC("sysret"); /** @todo 386 LOADALL */
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
}


/** Opcode 0x0f 0x08. */
FNIEMOP_STUB(iemOp_invd);
// IEMOP_HLP_MIN_486();
1321
1322
/** Opcode 0x0f 0x09. */
FNIEMOP_DEF(iemOp_wbinvd)
{
    /* Cache write-back+invalidate: only the CPL check is emulated, the
       cache operation itself is a no-op for a virtual machine. */
    IEMOP_MNEMONIC("wbinvd");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS; /* ignore for now */
}


/** Opcode 0x0f 0x0b. */
FNIEMOP_DEF(iemOp_ud2)
{
    /* UD2 - architecturally defined invalid opcode. */
    IEMOP_MNEMONIC("ud2");
    return IEMOP_RAISE_INVALID_OPCODE();
}
1343
/** Opcode 0x0f 0x0d. */
FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
{
    /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
    if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->f3DNowPrefetch)
    {
        IEMOP_MNEMONIC("GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register operands are invalid for the prefetch group. */
        IEMOP_MNEMONIC("GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    IEMOP_HLP_NO_LOCK_PREFIX();
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 2: /* Aliased to /0 for the time being. */
        case 4: /* Aliased to /0 for the time being. */
        case 5: /* Aliased to /0 for the time being. */
        case 6: /* Aliased to /0 for the time being. */
        case 7: /* Aliased to /0 for the time being. */
        case 0: IEMOP_MNEMONIC("prefetch"); break;
        case 1: IEMOP_MNEMONIC("prefetchw"); break;
        case 3: IEMOP_MNEMONIC("prefetchw"); break; /* NOTE(review): same mnemonic as /1 - confirm intended alias. */
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

    /* Only the effective address is evaluated; no memory access is made. */
    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    /* Currently a NOP. */
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
1383
1384
/** Opcode 0x0f 0x0e. */
FNIEMOP_STUB(iemOp_femms);


/*
 * 3DNow! instruction stubs (0x0f 0x0f with opcode suffix byte); dispatched
 * by iemOp_3Dnow below.  None are implemented yet.
 */

/** Opcode 0x0f 0x0f 0x0c. */
FNIEMOP_STUB(iemOp_3Dnow_pi2fw_Pq_Qq);

/** Opcode 0x0f 0x0f 0x0d. */
FNIEMOP_STUB(iemOp_3Dnow_pi2fd_Pq_Qq);

/** Opcode 0x0f 0x0f 0x1c. */
FNIEMOP_STUB(iemOp_3Dnow_pf2fw_Pq_Qq);

/** Opcode 0x0f 0x0f 0x1d. */
FNIEMOP_STUB(iemOp_3Dnow_pf2fd_Pq_Qq);

/** Opcode 0x0f 0x0f 0x8a. */
FNIEMOP_STUB(iemOp_3Dnow_pfnacc_Pq_Qq);

/** Opcode 0x0f 0x0f 0x8e. */
FNIEMOP_STUB(iemOp_3Dnow_pfpnacc_Pq_Qq);

/** Opcode 0x0f 0x0f 0x90. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpge_Pq_Qq);

/** Opcode 0x0f 0x0f 0x94. */
FNIEMOP_STUB(iemOp_3Dnow_pfmin_Pq_Qq);

/** Opcode 0x0f 0x0f 0x96. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcp_Pq_Qq);

/** Opcode 0x0f 0x0f 0x97. */
FNIEMOP_STUB(iemOp_3Dnow_pfrsqrt_Pq_Qq);

/** Opcode 0x0f 0x0f 0x9a. */
FNIEMOP_STUB(iemOp_3Dnow_pfsub_Pq_Qq);

/** Opcode 0x0f 0x0f 0x9e. */
FNIEMOP_STUB(iemOp_3Dnow_pfadd_PQ_Qq);

/** Opcode 0x0f 0x0f 0xa0. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpgt_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa4. */
FNIEMOP_STUB(iemOp_3Dnow_pfmax_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa6. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcpit1_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa7. */
FNIEMOP_STUB(iemOp_3Dnow_pfrsqit1_Pq_Qq);

/** Opcode 0x0f 0x0f 0xaa. */
FNIEMOP_STUB(iemOp_3Dnow_pfsubr_Pq_Qq);

/** Opcode 0x0f 0x0f 0xae. */
FNIEMOP_STUB(iemOp_3Dnow_pfacc_PQ_Qq);

/** Opcode 0x0f 0x0f 0xb0. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpeq_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb4. */
FNIEMOP_STUB(iemOp_3Dnow_pfmul_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb6. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcpit2_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb7. */
FNIEMOP_STUB(iemOp_3Dnow_pmulhrw_Pq_Qq);

/** Opcode 0x0f 0x0f 0xbb. */
FNIEMOP_STUB(iemOp_3Dnow_pswapd_Pq_Qq);

/** Opcode 0x0f 0x0f 0xbf. */
FNIEMOP_STUB(iemOp_3Dnow_pavgusb_PQ_Qq);
1460
1461
/** Opcode 0x0f 0x0f. */
FNIEMOP_DEF(iemOp_3Dnow)
{
    if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->f3DNow)
    {
        IEMOP_MNEMONIC("3Dnow");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    /* This is pretty sparse, use switch instead of table. */
    /* NOTE(review): the 3DNow! opcode byte is an imm8 that architecturally
       follows the ModR/M and displacement bytes; here it is fetched first.
       Harmless while every target is a stub, but verify the decode order
       before implementing any of them. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    switch (b)
    {
        case 0x0c: return FNIEMOP_CALL(iemOp_3Dnow_pi2fw_Pq_Qq);
        case 0x0d: return FNIEMOP_CALL(iemOp_3Dnow_pi2fd_Pq_Qq);
        case 0x1c: return FNIEMOP_CALL(iemOp_3Dnow_pf2fw_Pq_Qq);
        case 0x1d: return FNIEMOP_CALL(iemOp_3Dnow_pf2fd_Pq_Qq);
        case 0x8a: return FNIEMOP_CALL(iemOp_3Dnow_pfnacc_Pq_Qq);
        case 0x8e: return FNIEMOP_CALL(iemOp_3Dnow_pfpnacc_Pq_Qq);
        case 0x90: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpge_Pq_Qq);
        case 0x94: return FNIEMOP_CALL(iemOp_3Dnow_pfmin_Pq_Qq);
        case 0x96: return FNIEMOP_CALL(iemOp_3Dnow_pfrcp_Pq_Qq);
        case 0x97: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqrt_Pq_Qq);
        case 0x9a: return FNIEMOP_CALL(iemOp_3Dnow_pfsub_Pq_Qq);
        case 0x9e: return FNIEMOP_CALL(iemOp_3Dnow_pfadd_PQ_Qq);
        case 0xa0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpgt_Pq_Qq);
        case 0xa4: return FNIEMOP_CALL(iemOp_3Dnow_pfmax_Pq_Qq);
        case 0xa6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit1_Pq_Qq);
        case 0xa7: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqit1_Pq_Qq);
        case 0xaa: return FNIEMOP_CALL(iemOp_3Dnow_pfsubr_Pq_Qq);
        case 0xae: return FNIEMOP_CALL(iemOp_3Dnow_pfacc_PQ_Qq);
        case 0xb0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpeq_Pq_Qq);
        case 0xb4: return FNIEMOP_CALL(iemOp_3Dnow_pfmul_Pq_Qq);
        case 0xb6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit2_Pq_Qq);
        case 0xb7: return FNIEMOP_CALL(iemOp_3Dnow_pmulhrw_Pq_Qq);
        case 0xbb: return FNIEMOP_CALL(iemOp_3Dnow_pswapd_Pq_Qq);
        case 0xbf: return FNIEMOP_CALL(iemOp_3Dnow_pavgusb_PQ_Qq);
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
1503
1504
/* SSE/SSE2 move instruction stubs (0x0f 0x10..0x17); //NEXT marks the
   candidates queued for implementation. */

/** Opcode 0x0f 0x10. */
FNIEMOP_STUB(iemOp_movups_Vps_Wps__movupd_Vpd_Wpd__movss_Vss_Wss__movsd_Vsd_Wsd);
/** Opcode 0x0f 0x11. */
FNIEMOP_STUB(iemOp_movups_Wps_Vps__movupd_Wpd_Vpd__movss_Wss_Vss__movsd_Vsd_Wsd);
/** Opcode 0x0f 0x12. */
FNIEMOP_STUB(iemOp_movlps_Vq_Mq__movhlps_Vq_Uq__movlpd_Vq_Mq__movsldup_Vq_Wq__movddup_Vq_Wq); //NEXT
/** Opcode 0x0f 0x13. */
FNIEMOP_STUB(iemOp_movlps_Mq_Vq__movlpd_Mq_Vq); //NEXT
/** Opcode 0x0f 0x14. */
FNIEMOP_STUB(iemOp_unpckhlps_Vps_Wq__unpcklpd_Vpd_Wq);
/** Opcode 0x0f 0x15. */
FNIEMOP_STUB(iemOp_unpckhps_Vps_Wq__unpckhpd_Vpd_Wq);
/** Opcode 0x0f 0x16. */
FNIEMOP_STUB(iemOp_movhps_Vq_Mq__movlhps_Vq_Uq__movhpd_Vq_Mq__movshdup_Vq_Wq); //NEXT
/** Opcode 0x0f 0x17. */
FNIEMOP_STUB(iemOp_movhps_Mq_Vq__movhpd_Mq_Vq); //NEXT
1521
1522
/** Opcode 0x0f 0x18 (group 16 - PREFETCHh). */
FNIEMOP_DEF(iemOp_prefetch_Grp16)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX();
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 4: /* Aliased to /0 for the time being according to AMD. */
            case 5: /* Aliased to /0 for the time being according to AMD. */
            case 6: /* Aliased to /0 for the time being according to AMD. */
            case 7: /* Aliased to /0 for the time being according to AMD. */
            case 0: IEMOP_MNEMONIC("prefetchNTA m8"); break;
            case 1: IEMOP_MNEMONIC("prefetchT0  m8"); break;
            case 2: IEMOP_MNEMONIC("prefetchT1  m8"); break;
            case 3: IEMOP_MNEMONIC("prefetchT2  m8"); break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }

        /* Only the effective address is evaluated; the prefetch hint itself
           is not modelled. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        /* Currently a NOP. */
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /* Register encodings are invalid. */
    return IEMOP_RAISE_INVALID_OPCODE();
}
1554
1555
/** Opcode 0x0f 0x19..0x1f - multi-byte NOP (NOP Ev). */
FNIEMOP_DEF(iemOp_nop_Ev)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEM_MC_BEGIN(0, 0);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* Memory form: the effective address is decoded but never accessed. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        /* Currently a NOP. */
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1578
1579
/** Opcode 0x0f 0x20. */
FNIEMOP_DEF(iemOp_mov_Rd_Cd)
{
    /* mod is ignored, as is operand size overrides. */
    IEMOP_MNEMONIC("mov Rd,Cd");
    IEMOP_HLP_MIN_386();
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_64BIT;
    else
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg;
    if (pIemCpu->fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fMovCr8In32Bit)
            return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    /* Only CR0, CR2-CR4 and CR8 exist. */
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_HLP_DONE_DECODING();

    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, (X86_MODRM_RM_MASK & bRm) | pIemCpu->uRexB, iCrReg);
}


/** Opcode 0x0f 0x21. */
FNIEMOP_DEF(iemOp_mov_Rd_Dd)
{
    /* NOTE(review): uses IEMOP_HLP_NO_LOCK_PREFIX whereas the 0x23 sibling
       uses IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX - verify the asymmetry is
       intentional. */
    IEMOP_MNEMONIC("mov Rd,Dd");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REX_R)
        return IEMOP_RAISE_INVALID_OPCODE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
                                   (X86_MODRM_RM_MASK & bRm) | pIemCpu->uRexB,
                                   ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK));
}


/** Opcode 0x0f 0x22. */
FNIEMOP_DEF(iemOp_mov_Cd_Rd)
{
    /* mod is ignored, as is operand size overrides. */
    IEMOP_MNEMONIC("mov Cd,Rd");
    IEMOP_HLP_MIN_386();
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_64BIT;
    else
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg;
    if (pIemCpu->fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fMovCr8In32Bit)
            return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    /* Only CR0, CR2-CR4 and CR8 exist. */
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_HLP_DONE_DECODING();

    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, (X86_MODRM_RM_MASK & bRm) | pIemCpu->uRexB);
}


/** Opcode 0x0f 0x23. */
FNIEMOP_DEF(iemOp_mov_Dd_Rd)
{
    IEMOP_MNEMONIC("mov Dd,Rd");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REX_R)
        return IEMOP_RAISE_INVALID_OPCODE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
                                   ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK),
                                   (X86_MODRM_RM_MASK & bRm) | pIemCpu->uRexB);
}


/** Opcode 0x0f 0x24. */
FNIEMOP_DEF(iemOp_mov_Rd_Td)
{
    /* Test registers - invalid on the emulated CPUs. */
    IEMOP_MNEMONIC("mov Rd,Td");
    /** @todo works on 386 and 486. */
    /* The RM byte is not considered, see testcase. */
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x26. */
FNIEMOP_DEF(iemOp_mov_Td_Rd)
{
    /* Test registers - invalid on the emulated CPUs. */
    IEMOP_MNEMONIC("mov Td,Rd");
    /** @todo works on 386 and 486. */
    /* The RM byte is not considered, see testcase. */
    return IEMOP_RAISE_INVALID_OPCODE();
}
1694
1695
/** Opcode 0x0f 0x28 - load form: xmm <- xmm/m128 (aligned). */
FNIEMOP_DEF(iemOp_movaps_Vps_Wps__movapd_Vpd_Wpd)
{
    /* Operand-size prefix selects movapd (SSE2), otherwise movaps (SSE). */
    IEMOP_MNEMONIC(!(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP) ? "movaps r,mr" : "movapd r,mr");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        IEM_MC_BEGIN(0, 0);
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP))
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        else
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg,
                              (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint128_t,                 uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ */
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP))
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        else
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1743
1744
/** Opcode 0x0f 0x29 - store form: xmm/m128 <- xmm (aligned). */
FNIEMOP_DEF(iemOp_movaps_Wps_Vps__movapd_Wpd_Vpd)
{
    /* Operand-size prefix selects movapd (SSE2), otherwise movaps (SSE). */
    IEMOP_MNEMONIC(!(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP) ? "movaps mr,r" : "movapd mr,r");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        IEM_MC_BEGIN(0, 0);
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP))
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        else
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint128_t,                 uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ */
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP))
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        else
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pIemCpu->iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1792
1793
/** Opcode 0x0f 0x2a. */
FNIEMOP_STUB(iemOp_cvtpi2ps_Vps_Qpi__cvtpi2pd_Vpd_Qpi__cvtsi2ss_Vss_Ey__cvtsi2sd_Vsd_Ey); //NEXT
1796
1797
1798/** Opcode 0x0f 0x2b. */
1799FNIEMOP_DEF(iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd)
1800{
1801 IEMOP_MNEMONIC(!(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP) ? "movntps mr,r" : "movntpd mr,r");
1802 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1803 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1804 {
1805 /*
1806 * Register, memory.
1807 */
1808 IEM_MC_BEGIN(0, 2);
1809 IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
1810 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1811
1812 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1813 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ */
1814 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP))
1815 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1816 else
1817 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1818 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1819
1820 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);
1821 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uSrc);
1822
1823 IEM_MC_ADVANCE_RIP();
1824 IEM_MC_END();
1825 }
1826 /* The register, register encoding is invalid. */
1827 else
1828 return IEMOP_RAISE_INVALID_OPCODE();
1829 return VINF_SUCCESS;
1830}
1831
1832
/** Opcode 0x0f 0x2c. */
FNIEMOP_STUB(iemOp_cvttps2pi_Ppi_Wps__cvttpd2pi_Ppi_Wpd__cvttss2si_Gy_Wss__cvttsd2si_Yu_Wsd); //NEXT  /* NOTE(review): '_Yu_' looks like a typo for '_Gy_' in the identifier. */
/** Opcode 0x0f 0x2d. */
FNIEMOP_STUB(iemOp_cvtps2pi_Ppi_Wps__cvtpd2pi_QpiWpd__cvtss2si_Gy_Wss__cvtsd2si_Gy_Wsd);
/** Opcode 0x0f 0x2e. */
FNIEMOP_STUB(iemOp_ucomiss_Vss_Wss__ucomisd_Vsd_Wsd); //NEXT
/** Opcode 0x0f 0x2f. */
FNIEMOP_STUB(iemOp_comiss_Vss_Wss__comisd_Vsd_Wsd);
1841
1842
/** Opcode 0x0f 0x30. */
FNIEMOP_DEF(iemOp_wrmsr)
{
    /* Write the MSR indexed by ECX from EDX:EAX; all work in iemCImpl_wrmsr. */
    IEMOP_MNEMONIC("wrmsr");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
}


/** Opcode 0x0f 0x31. */
FNIEMOP_DEF(iemOp_rdtsc)
{
    IEMOP_MNEMONIC("rdtsc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
}


/** Opcode 0x0f 0x32.  (Was mislabelled 0x33; RDMSR is 0F 32.) */
FNIEMOP_DEF(iemOp_rdmsr)
{
    /* Read the MSR indexed by ECX into EDX:EAX; all work in iemCImpl_rdmsr. */
    IEMOP_MNEMONIC("rdmsr");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
}


/** Opcode 0x0f 0x33.  (Was mislabelled 0x34; RDPMC is 0F 33.) */
FNIEMOP_STUB(iemOp_rdpmc);
/** Opcode 0x0f 0x34. */
FNIEMOP_STUB(iemOp_sysenter);
/** Opcode 0x0f 0x35. */
FNIEMOP_STUB(iemOp_sysexit);
/** Opcode 0x0f 0x37. */
FNIEMOP_STUB(iemOp_getsec);
/** Opcode 0x0f 0x38. */
FNIEMOP_UD_STUB(iemOp_3byte_Esc_A4); /* Here there be dragons... */
/** Opcode 0x0f 0x3a. */
FNIEMOP_UD_STUB(iemOp_3byte_Esc_A5); /* Here there be dragons... */
1882
1883
/**
 * Implements a conditional move (CMOVcc Gv,Ev).
 *
 * Wish there was an obvious way to do this where we could share and reduce
 * code bloat.
 *
 * Three things worth noting from the expansion:
 *  - the memory forms always read the source operand, condition true or not;
 *  - in 32-bit operand size the destination's high dword is cleared even when
 *    the condition is false (the IEM_MC_ELSE branch);
 *  - the 16/64-bit destinations are untouched on a false condition.
 *
 * @param a_Cnd The conditional "microcode" operation.
 */
#define CMOV_X(a_Cnd) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) \
    { \
        switch (pIemCpu->enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(0, 1); \
                IEM_MC_LOCAL(uint16_t, u16Tmp); \
                a_Cnd { \
                    IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB); \
                    IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(0, 1); \
                IEM_MC_LOCAL(uint32_t, u32Tmp); \
                a_Cnd { \
                    IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB); \
                    IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp); \
                } IEM_MC_ELSE() { \
                    IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(0, 1); \
                IEM_MC_LOCAL(uint64_t, u64Tmp); \
                a_Cnd { \
                    IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB); \
                    IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        switch (pIemCpu->enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(0, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
                IEM_MC_LOCAL(uint16_t, u16Tmp); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
                IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrEffSrc); \
                a_Cnd { \
                    IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(0, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
                IEM_MC_LOCAL(uint32_t, u32Tmp); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
                IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffSrc); \
                a_Cnd { \
                    IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp); \
                } IEM_MC_ELSE() { \
                    IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(0, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
                IEM_MC_LOCAL(uint64_t, u64Tmp); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
                IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffSrc); \
                a_Cnd { \
                    IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } do {} while (0)
1984
1985
1986
1987/** Opcode 0x0f 0x40. */
1988FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
1989{
1990 IEMOP_MNEMONIC("cmovo Gv,Ev");
1991 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
1992}
1993
1994
1995/** Opcode 0x0f 0x41. */
1996FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
1997{
1998 IEMOP_MNEMONIC("cmovno Gv,Ev");
1999 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
2000}
2001
2002
2003/** Opcode 0x0f 0x42. */
2004FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
2005{
2006 IEMOP_MNEMONIC("cmovc Gv,Ev");
2007 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
2008}
2009
2010
2011/** Opcode 0x0f 0x43. */
2012FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
2013{
2014 IEMOP_MNEMONIC("cmovnc Gv,Ev");
2015 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
2016}
2017
2018
/** Opcode 0x0f 0x44 - CMOVE/CMOVZ Gv,Ev: conditional move if equal/zero (ZF=1). */
FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
{
    IEMOP_MNEMONIC("cmove Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
}
2025
2026
/** Opcode 0x0f 0x45 - CMOVNE/CMOVNZ Gv,Ev: conditional move if not equal (ZF=0). */
FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovne Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
}
2033
2034
/** Opcode 0x0f 0x46 - CMOVBE Gv,Ev: conditional move if below or equal (CF=1 or ZF=1). */
FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovbe Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
}
2041
2042
/** Opcode 0x0f 0x47 - CMOVNBE/CMOVA Gv,Ev: conditional move if above (CF=0 and ZF=0). */
FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovnbe Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
}
2049
2050
/** Opcode 0x0f 0x48 - CMOVS Gv,Ev: conditional move if sign (SF=1). */
FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovs Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
}
2057
2058
/** Opcode 0x0f 0x49 - CMOVNS Gv,Ev: conditional move if no sign (SF=0). */
FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovns Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
}
2065
2066
/** Opcode 0x0f 0x4a - CMOVP/CMOVPE Gv,Ev: conditional move if parity even (PF=1). */
FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovp Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
}
2073
2074
/** Opcode 0x0f 0x4b - CMOVNP/CMOVPO Gv,Ev: conditional move if parity odd (PF=0). */
FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovnp Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
}
2081
2082
/** Opcode 0x0f 0x4c - CMOVL Gv,Ev: conditional move if less, signed (SF != OF). */
FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovl Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
}
2089
2090
/** Opcode 0x0f 0x4d - CMOVNL/CMOVGE Gv,Ev: conditional move if not less, signed (SF == OF). */
FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovnl Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
}
2097
2098
/** Opcode 0x0f 0x4e - CMOVLE Gv,Ev: conditional move if less or equal, signed (ZF=1 or SF != OF). */
FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovle Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
}
2105
2106
/** Opcode 0x0f 0x4f - CMOVNLE/CMOVG Gv,Ev: conditional move if greater, signed (ZF=0 and SF == OF). */
FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovnle Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
}
2113
2114#undef CMOV_X
2115
/*
 * Opcodes 0x0f 0x50 thru 0x0f 0x5f: the SSE/SSE2 packed and scalar
 * floating-point instructions.  None of these are implemented yet; they are
 * only declared as decoder stubs here.  Entries tagged NEXT are queued for
 * implementation.
 */
/** Opcode 0x0f 0x50. */
FNIEMOP_STUB(iemOp_movmskps_Gy_Ups__movmskpd_Gy_Upd);
/** Opcode 0x0f 0x51. */
FNIEMOP_STUB(iemOp_sqrtps_Wps_Vps__sqrtpd_Wpd_Vpd__sqrtss_Vss_Wss__sqrtsd_Vsd_Wsd);
/** Opcode 0x0f 0x52. */
FNIEMOP_STUB(iemOp_rsqrtps_Wps_Vps__rsqrtss_Vss_Wss);
/** Opcode 0x0f 0x53. */
FNIEMOP_STUB(iemOp_rcpps_Wps_Vps__rcpss_Vs_Wss);
/** Opcode 0x0f 0x54. */
FNIEMOP_STUB(iemOp_andps_Vps_Wps__andpd_Wpd_Vpd);
/** Opcode 0x0f 0x55. */
FNIEMOP_STUB(iemOp_andnps_Vps_Wps__andnpd_Wpd_Vpd);
/** Opcode 0x0f 0x56. */
FNIEMOP_STUB(iemOp_orps_Wpd_Vpd__orpd_Wpd_Vpd);
/** Opcode 0x0f 0x57. */
FNIEMOP_STUB(iemOp_xorps_Vps_Wps__xorpd_Wpd_Vpd);
/** Opcode 0x0f 0x58. */
FNIEMOP_STUB(iemOp_addps_Vps_Wps__addpd_Vpd_Wpd__addss_Vss_Wss__addsd_Vsd_Wsd); //NEXT
/** Opcode 0x0f 0x59. */
FNIEMOP_STUB(iemOp_mulps_Vps_Wps__mulpd_Vpd_Wpd__mulss_Vss__Wss__mulsd_Vsd_Wsd);//NEXT
/** Opcode 0x0f 0x5a. */
FNIEMOP_STUB(iemOp_cvtps2pd_Vpd_Wps__cvtpd2ps_Vps_Wpd__cvtss2sd_Vsd_Wss__cvtsd2ss_Vss_Wsd);
/** Opcode 0x0f 0x5b. */
FNIEMOP_STUB(iemOp_cvtdq2ps_Vps_Wdq__cvtps2dq_Vdq_Wps__cvtps2dq_Vdq_Wps);
/** Opcode 0x0f 0x5c. */
FNIEMOP_STUB(iemOp_subps_Vps_Wps__subpd_Vps_Wdp__subss_Vss_Wss__subsd_Vsd_Wsd);
/** Opcode 0x0f 0x5d. */
FNIEMOP_STUB(iemOp_minps_Vps_Wps__minpd_Vpd_Wpd__minss_Vss_Wss__minsd_Vsd_Wsd);
/** Opcode 0x0f 0x5e. */
FNIEMOP_STUB(iemOp_divps_Vps_Wps__divpd_Vpd_Wpd__divss_Vss_Wss__divsd_Vsd_Wsd);
/** Opcode 0x0f 0x5f. */
FNIEMOP_STUB(iemOp_maxps_Vps_Wps__maxpd_Vpd_Wpd__maxss_Vss_Wss__maxsd_Vsd_Wsd);
2148
2149
2150/**
2151 * Common worker for SSE2 and MMX instructions on the forms:
2152 * pxxxx xmm1, xmm2/mem128
2153 * pxxxx mm1, mm2/mem32
2154 *
2155 * The 2nd operand is the first half of a register, which in the memory case
2156 * means a 32-bit memory access for MMX and 128-bit aligned 64-bit or 128-bit
2157 * memory accessed for MMX.
2158 *
2159 * Exceptions type 4.
2160 */
2161FNIEMOP_DEF_1(iemOpCommonMmxSse_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
2162{
2163 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2164 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
2165 {
2166 case IEM_OP_PRF_SIZE_OP: /* SSE */
2167 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2168 {
2169 /*
2170 * Register, register.
2171 */
2172 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2173 IEM_MC_BEGIN(2, 0);
2174 IEM_MC_ARG(uint128_t *, pDst, 0);
2175 IEM_MC_ARG(uint64_t const *, pSrc, 1);
2176 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2177 IEM_MC_PREPARE_SSE_USAGE();
2178 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
2179 IEM_MC_REF_XREG_U64_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
2180 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2181 IEM_MC_ADVANCE_RIP();
2182 IEM_MC_END();
2183 }
2184 else
2185 {
2186 /*
2187 * Register, memory.
2188 */
2189 IEM_MC_BEGIN(2, 2);
2190 IEM_MC_ARG(uint128_t *, pDst, 0);
2191 IEM_MC_LOCAL(uint64_t, uSrc);
2192 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
2193 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2194
2195 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2196 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2197 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2198 IEM_MC_FETCH_MEM_U64_ALIGN_U128(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);
2199
2200 IEM_MC_PREPARE_SSE_USAGE();
2201 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
2202 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2203
2204 IEM_MC_ADVANCE_RIP();
2205 IEM_MC_END();
2206 }
2207 return VINF_SUCCESS;
2208
2209 case 0: /* MMX */
2210 if (!pImpl->pfnU64)
2211 return IEMOP_RAISE_INVALID_OPCODE();
2212 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2213 {
2214 /*
2215 * Register, register.
2216 */
2217 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
2218 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
2219 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2220 IEM_MC_BEGIN(2, 0);
2221 IEM_MC_ARG(uint64_t *, pDst, 0);
2222 IEM_MC_ARG(uint32_t const *, pSrc, 1);
2223 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2224 IEM_MC_PREPARE_FPU_USAGE();
2225 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2226 IEM_MC_REF_MREG_U32_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
2227 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2228 IEM_MC_ADVANCE_RIP();
2229 IEM_MC_END();
2230 }
2231 else
2232 {
2233 /*
2234 * Register, memory.
2235 */
2236 IEM_MC_BEGIN(2, 2);
2237 IEM_MC_ARG(uint64_t *, pDst, 0);
2238 IEM_MC_LOCAL(uint32_t, uSrc);
2239 IEM_MC_ARG_LOCAL_REF(uint32_t const *, pSrc, uSrc, 1);
2240 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2241
2242 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2243 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2244 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2245 IEM_MC_FETCH_MEM_U32(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);
2246
2247 IEM_MC_PREPARE_FPU_USAGE();
2248 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2249 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2250
2251 IEM_MC_ADVANCE_RIP();
2252 IEM_MC_END();
2253 }
2254 return VINF_SUCCESS;
2255
2256 default:
2257 return IEMOP_RAISE_INVALID_OPCODE();
2258 }
2259}
2260
2261
/** Opcode 0x0f 0x60 - punpcklbw: interleave low bytes of the two operands. */
FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd__punpcklbw_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpcklbw");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_LowLow_To_Full, &g_iemAImpl_punpcklbw);
}
2268
2269
/** Opcode 0x0f 0x61 - punpcklwd: interleave low words of the two operands. */
FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd__punpcklwd_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpcklwd"); /** @todo AMD mark the MMX version as 3DNow!. Intel says MMX CPUID req. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_LowLow_To_Full, &g_iemAImpl_punpcklwd);
}
2276
2277
/** Opcode 0x0f 0x62 - punpckldq: interleave low dwords of the two operands. */
FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd__punpckldq_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpckldq");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_LowLow_To_Full, &g_iemAImpl_punpckldq);
}
2284
2285
/*
 * Opcodes 0x0f 0x63 thru 0x0f 0x67: MMX/SSE2 pack and compare instructions,
 * currently only declared as decoder stubs.
 */
/** Opcode 0x0f 0x63. */
FNIEMOP_STUB(iemOp_packsswb_Pq_Qq__packsswb_Vdq_Wdq);
/** Opcode 0x0f 0x64. */
FNIEMOP_STUB(iemOp_pcmpgtb_Pq_Qq__pcmpgtb_Vdq_Wdq);
/** Opcode 0x0f 0x65. */
FNIEMOP_STUB(iemOp_pcmpgtw_Pq_Qq__pcmpgtw_Vdq_Wdq);
/** Opcode 0x0f 0x66. */
FNIEMOP_STUB(iemOp_pcmpgtd_Pq_Qq__pcmpgtd_Vdq_Wdq);
/** Opcode 0x0f 0x67. */
FNIEMOP_STUB(iemOp_packuswb_Pq_Qq__packuswb_Vdq_Wdq);
2296
2297
2298/**
2299 * Common worker for SSE2 and MMX instructions on the forms:
2300 * pxxxx xmm1, xmm2/mem128
2301 * pxxxx mm1, mm2/mem64
2302 *
2303 * The 2nd operand is the second half of a register, which in the memory case
2304 * means a 64-bit memory access for MMX, and for MMX a 128-bit aligned access
2305 * where it may read the full 128 bits or only the upper 64 bits.
2306 *
2307 * Exceptions type 4.
2308 */
2309FNIEMOP_DEF_1(iemOpCommonMmxSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
2310{
2311 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2312 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
2313 {
2314 case IEM_OP_PRF_SIZE_OP: /* SSE */
2315 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2316 {
2317 /*
2318 * Register, register.
2319 */
2320 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2321 IEM_MC_BEGIN(2, 0);
2322 IEM_MC_ARG(uint128_t *, pDst, 0);
2323 IEM_MC_ARG(uint128_t const *, pSrc, 1);
2324 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2325 IEM_MC_PREPARE_SSE_USAGE();
2326 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
2327 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
2328 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2329 IEM_MC_ADVANCE_RIP();
2330 IEM_MC_END();
2331 }
2332 else
2333 {
2334 /*
2335 * Register, memory.
2336 */
2337 IEM_MC_BEGIN(2, 2);
2338 IEM_MC_ARG(uint128_t *, pDst, 0);
2339 IEM_MC_LOCAL(uint128_t, uSrc);
2340 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
2341 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2342
2343 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2344 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2345 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2346 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc); /* Most CPUs probably only right high qword */
2347
2348 IEM_MC_PREPARE_SSE_USAGE();
2349 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
2350 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2351
2352 IEM_MC_ADVANCE_RIP();
2353 IEM_MC_END();
2354 }
2355 return VINF_SUCCESS;
2356
2357 case 0: /* MMX */
2358 if (!pImpl->pfnU64)
2359 return IEMOP_RAISE_INVALID_OPCODE();
2360 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2361 {
2362 /*
2363 * Register, register.
2364 */
2365 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
2366 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
2367 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2368 IEM_MC_BEGIN(2, 0);
2369 IEM_MC_ARG(uint64_t *, pDst, 0);
2370 IEM_MC_ARG(uint64_t const *, pSrc, 1);
2371 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2372 IEM_MC_PREPARE_FPU_USAGE();
2373 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2374 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
2375 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2376 IEM_MC_ADVANCE_RIP();
2377 IEM_MC_END();
2378 }
2379 else
2380 {
2381 /*
2382 * Register, memory.
2383 */
2384 IEM_MC_BEGIN(2, 2);
2385 IEM_MC_ARG(uint64_t *, pDst, 0);
2386 IEM_MC_LOCAL(uint64_t, uSrc);
2387 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
2388 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2389
2390 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2391 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2392 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2393 IEM_MC_FETCH_MEM_U64(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);
2394
2395 IEM_MC_PREPARE_FPU_USAGE();
2396 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2397 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2398
2399 IEM_MC_ADVANCE_RIP();
2400 IEM_MC_END();
2401 }
2402 return VINF_SUCCESS;
2403
2404 default:
2405 return IEMOP_RAISE_INVALID_OPCODE();
2406 }
2407}
2408
2409
/** Opcode 0x0f 0x68 - punpckhbw: interleave high bytes of the two operands. */
FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qq__punpckhbw_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpckhbw");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
}
2416
2417
/** Opcode 0x0f 0x69 - punpckhwd: interleave high words of the two operands. */
FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qd__punpckhwd_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpckhwd");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
}
2424
2425
/** Opcode 0x0f 0x6a - punpckhdq: interleave high dwords of the two operands. */
FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qd__punpckhdq_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpckhdq");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
}
2432
/** Opcode 0x0f 0x6b - packssdw (MMX/SSE2), not yet implemented (decoder stub). */
FNIEMOP_STUB(iemOp_packssdw_Pq_Qd__packssdq_Vdq_Wdq);
2435
2436
/** Opcode 0x0f 0x6c - punpcklqdq: interleave low qwords (SSE2 only; the
 *  common worker rejects the prefix-less MMX encoding). */
FNIEMOP_DEF(iemOp_punpcklqdq_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpcklqdq");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_LowLow_To_Full, &g_iemAImpl_punpcklqdq);
}
2443
2444
/** Opcode 0x0f 0x6d - punpckhqdq: interleave high qwords (SSE2 only; the
 *  common worker rejects the prefix-less MMX encoding). */
FNIEMOP_DEF(iemOp_punpckhqdq_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpckhqdq");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhqdq);
}
2451
2452
/** Opcode 0x0f 0x6e - movd/movq Pd/q,Ed/q (MMX) and movd/movq Vd/q,Ed/q (SSE2).
 *
 * REX.W selects the 64-bit (movq) variant, the 66h prefix selects the XMM
 * destination; the upper destination bits are zero extended in all cases. */
FNIEMOP_DEF(iemOp_movd_q_Pd_Ey__movd_q_Vy_Ey)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE */
            IEMOP_MNEMONIC("movd/q Wd/q,Ed/q");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* XMM, greg*/
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                    IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                    IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* XMM, [mem] */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); /** @todo order */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                    IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                    IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            IEMOP_MNEMONIC("movq/d Pd/q,Ed/q");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* MMX, greg */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                    IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                else
                    IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* MMX, [mem] */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                    IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                    IEM_MC_STORE_MREG_U32_ZX_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
2557
2558
/** Opcode 0x0f 0x6f - movq Pq,Qq (MMX), movdqa Vdq,Wdq (66h) and
 *  movdqu Vdq,Wdq (F3h).  The 66h form enforces 16-byte alignment of the
 *  memory operand, the F3h form does not. */
FNIEMOP_DEF(iemOp_movq_Pq_Qq__movdqa_Vdq_Wdq__movdqu_Vdq_Wdq)
{
    bool fAligned = false;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE aligned */
            fAligned = true;
            /* fall thru - aligned and unaligned forms share the code below. */
        case IEM_OP_PRF_REPZ: /* SSE unaligned */
            if (fAligned)
                IEMOP_MNEMONIC("movdqa Vdq,Wdq");
            else
                IEMOP_MNEMONIC("movdqu Vdq,Wdq");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 0);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
                IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg,
                                      (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint128_t, u128Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
                if (fAligned)
                    IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                else
                    IEM_MC_FETCH_MEM_U128(u128Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u128Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            IEMOP_MNEMONIC("movq Pq,Qq");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
                /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
                IEM_MC_FETCH_MREG_U64(u64Tmp, bRm & X86_MODRM_RM_MASK);
                IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
2655
2656
/** Opcode 0x0f 0x70 - pshufw (MMX ext.), pshufd (66h), pshuflw (F2h) and
 *  pshufhw (F3h).  The immediate here is evil!  It follows the ModRM byte and
 *  any displacement, so in the memory forms it must be fetched only after
 *  the effective address has been decoded. */
FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib__pshufd_Vdq_Wdq_Ib__pshufhw_Vdq_Wdq_Ib__pshuflq_Vdq_Wdq_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE */
        case IEM_OP_PRF_REPNZ: /* SSE */
        case IEM_OP_PRF_REPZ: /* SSE */
        {
            /* Pick the assembly worker for the prefix-selected variant. */
            PFNIEMAIMPLMEDIAPSHUF pfnAImpl;
            switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
            {
                case IEM_OP_PRF_SIZE_OP:
                    IEMOP_MNEMONIC("pshufd Vdq,Wdq,Ib");
                    pfnAImpl = iemAImpl_pshufd;
                    break;
                case IEM_OP_PRF_REPNZ:
                    IEMOP_MNEMONIC("pshuflw Vdq,Wdq,Ib");
                    pfnAImpl = iemAImpl_pshuflw;
                    break;
                case IEM_OP_PRF_REPZ:
                    IEMOP_MNEMONIC("pshufhw Vdq,Wdq,Ib");
                    pfnAImpl = iemAImpl_pshufhw;
                    break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_ARG(uint128_t const *, pSrc, 1);
                IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_PREPARE_SSE_USAGE();
                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_CALL_SSE_AIMPL_3(pfnAImpl, pDst, pSrc, bEvilArg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_LOCAL(uint128_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                /* The imm8 comes after the displacement, so fetch it only
                   after the effective address has been calculated. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
                IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();

                IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_PREPARE_SSE_USAGE();
                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_SSE_AIMPL_3(pfnAImpl, pDst, pSrc, bEvilArg);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }

        case 0: /* MMX Extension */
            IEMOP_MNEMONIC("pshufw Pq,Qq,Ib");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_ARG(uint64_t const *, pSrc, 1);
                IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
                IEM_MC_PREPARE_FPU_USAGE();
                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
                IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_LOCAL(uint64_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                /* The imm8 comes after the displacement, so fetch it only
                   after the effective address has been calculated. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
                IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();

                IEM_MC_FETCH_MEM_U64(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_PREPARE_FPU_USAGE();
                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
2785
2786
/*
 * Group 12 workers (opcode 0x0f 0x71): word shifts by immediate, currently
 * only declared as decoder stubs.  Nq = MMX register form, Udq = XMM form.
 */
/** Opcode 0x0f 0x71 11/2. */
FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x71 11/2. */
FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Udq_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x71 11/4. */
FNIEMOP_STUB_1(iemOp_Grp12_psraw_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x71 11/4. */
FNIEMOP_STUB_1(iemOp_Grp12_psraw_Udq_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x71 11/6. */
FNIEMOP_STUB_1(iemOp_Grp12_psllw_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x71 11/6. */
FNIEMOP_STUB_1(iemOp_Grp12_psllw_Udq_Ib, uint8_t, bRm);
2804
2805
2806/** Opcode 0x0f 0x71. */
2807FNIEMOP_DEF(iemOp_Grp12)
2808{
2809 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2810 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2811 return IEMOP_RAISE_INVALID_OPCODE();
2812 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
2813 {
2814 case 0: case 1: case 3: case 5: case 7:
2815 return IEMOP_RAISE_INVALID_OPCODE();
2816 case 2:
2817 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2818 {
2819 case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psrlw_Nq_Ib, bRm);
2820 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psrlw_Udq_Ib, bRm);
2821 default: return IEMOP_RAISE_INVALID_OPCODE();
2822 }
2823 case 4:
2824 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2825 {
2826 case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psraw_Nq_Ib, bRm);
2827 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psraw_Udq_Ib, bRm);
2828 default: return IEMOP_RAISE_INVALID_OPCODE();
2829 }
2830 case 6:
2831 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2832 {
2833 case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psllw_Nq_Ib, bRm);
2834 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psllw_Udq_Ib, bRm);
2835 default: return IEMOP_RAISE_INVALID_OPCODE();
2836 }
2837 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2838 }
2839}
2840
2841
/*
 * Group 13 workers (opcode 0x0f 0x72): dword shifts by immediate, currently
 * only declared as decoder stubs.  Nq = MMX register form, Udq = XMM form.
 */
/** Opcode 0x0f 0x72 11/2. */
FNIEMOP_STUB_1(iemOp_Grp13_psrld_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x72 11/2. */
FNIEMOP_STUB_1(iemOp_Grp13_psrld_Udq_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x72 11/4. */
FNIEMOP_STUB_1(iemOp_Grp13_psrad_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x72 11/4. */
FNIEMOP_STUB_1(iemOp_Grp13_psrad_Udq_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x72 11/6. */
FNIEMOP_STUB_1(iemOp_Grp13_pslld_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x72 11/6. */
FNIEMOP_STUB_1(iemOp_Grp13_pslld_Udq_Ib, uint8_t, bRm);
2859
2860
2861/** Opcode 0x0f 0x72. */
2862FNIEMOP_DEF(iemOp_Grp13)
2863{
2864 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2865 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2866 return IEMOP_RAISE_INVALID_OPCODE();
2867 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
2868 {
2869 case 0: case 1: case 3: case 5: case 7:
2870 return IEMOP_RAISE_INVALID_OPCODE();
2871 case 2:
2872 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2873 {
2874 case 0: return FNIEMOP_CALL_1(iemOp_Grp13_psrld_Nq_Ib, bRm);
2875 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_psrld_Udq_Ib, bRm);
2876 default: return IEMOP_RAISE_INVALID_OPCODE();
2877 }
2878 case 4:
2879 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2880 {
2881 case 0: return FNIEMOP_CALL_1(iemOp_Grp13_psrad_Nq_Ib, bRm);
2882 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_psrad_Udq_Ib, bRm);
2883 default: return IEMOP_RAISE_INVALID_OPCODE();
2884 }
2885 case 6:
2886 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2887 {
2888 case 0: return FNIEMOP_CALL_1(iemOp_Grp13_pslld_Nq_Ib, bRm);
2889 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_pslld_Udq_Ib, bRm);
2890 default: return IEMOP_RAISE_INVALID_OPCODE();
2891 }
2892 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2893 }
2894}
2895
2896
/*
 * Group 14 workers (opcode 0x0f 0x73): qword and whole-register shifts by
 * immediate, currently only declared as decoder stubs.  Nq = MMX register
 * form, Udq = XMM form; psrldq/pslldq exist only with the 66h prefix.
 */
/** Opcode 0x0f 0x73 11/2. */
FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x73 11/2. */
FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Udq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x73 11/3. */
FNIEMOP_STUB_1(iemOp_Grp14_psrldq_Udq_Ib, uint8_t, bRm); //NEXT

/** Opcode 0x0f 0x73 11/6. */
FNIEMOP_STUB_1(iemOp_Grp14_psllq_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x73 11/6. */
FNIEMOP_STUB_1(iemOp_Grp14_psllq_Udq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x73 11/7. */
FNIEMOP_STUB_1(iemOp_Grp14_pslldq_Udq_Ib, uint8_t, bRm); //NEXT
2914
2915
2916/** Opcode 0x0f 0x73. */
2917FNIEMOP_DEF(iemOp_Grp14)
2918{
2919 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2920 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2921 return IEMOP_RAISE_INVALID_OPCODE();
2922 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
2923 {
2924 case 0: case 1: case 4: case 5:
2925 return IEMOP_RAISE_INVALID_OPCODE();
2926 case 2:
2927 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2928 {
2929 case 0: return FNIEMOP_CALL_1(iemOp_Grp14_psrlq_Nq_Ib, bRm);
2930 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psrlq_Udq_Ib, bRm);
2931 default: return IEMOP_RAISE_INVALID_OPCODE();
2932 }
2933 case 3:
2934 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2935 {
2936 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psrldq_Udq_Ib, bRm);
2937 default: return IEMOP_RAISE_INVALID_OPCODE();
2938 }
2939 case 6:
2940 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2941 {
2942 case 0: return FNIEMOP_CALL_1(iemOp_Grp14_psllq_Nq_Ib, bRm);
2943 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psllq_Udq_Ib, bRm);
2944 default: return IEMOP_RAISE_INVALID_OPCODE();
2945 }
2946 case 7:
2947 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2948 {
2949 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_pslldq_Udq_Ib, bRm);
2950 default: return IEMOP_RAISE_INVALID_OPCODE();
2951 }
2952 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2953 }
2954}
2955
2956
2957/**
2958 * Common worker for SSE2 and MMX instructions on the forms:
2959 * pxxx mm1, mm2/mem64
2960 * pxxx xmm1, xmm2/mem128
2961 *
2962 * Proper alignment of the 128-bit operand is enforced.
2963 * Exceptions type 4. SSE2 and MMX cpuid checks.
2964 */
2965FNIEMOP_DEF_1(iemOpCommonMmxSse2_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
2966{
2967 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2968 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
2969 {
2970 case IEM_OP_PRF_SIZE_OP: /* SSE */
2971 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2972 {
2973 /*
2974 * Register, register.
2975 */
2976 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2977 IEM_MC_BEGIN(2, 0);
2978 IEM_MC_ARG(uint128_t *, pDst, 0);
2979 IEM_MC_ARG(uint128_t const *, pSrc, 1);
2980 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2981 IEM_MC_PREPARE_SSE_USAGE();
2982 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
2983 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
2984 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2985 IEM_MC_ADVANCE_RIP();
2986 IEM_MC_END();
2987 }
2988 else
2989 {
2990 /*
2991 * Register, memory.
2992 */
2993 IEM_MC_BEGIN(2, 2);
2994 IEM_MC_ARG(uint128_t *, pDst, 0);
2995 IEM_MC_LOCAL(uint128_t, uSrc);
2996 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
2997 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2998
2999 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3000 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3001 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3002 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);
3003
3004 IEM_MC_PREPARE_SSE_USAGE();
3005 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
3006 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3007
3008 IEM_MC_ADVANCE_RIP();
3009 IEM_MC_END();
3010 }
3011 return VINF_SUCCESS;
3012
3013 case 0: /* MMX */
3014 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3015 {
3016 /*
3017 * Register, register.
3018 */
3019 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3020 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3021 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3022 IEM_MC_BEGIN(2, 0);
3023 IEM_MC_ARG(uint64_t *, pDst, 0);
3024 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3025 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3026 IEM_MC_PREPARE_FPU_USAGE();
3027 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3028 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3029 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3030 IEM_MC_ADVANCE_RIP();
3031 IEM_MC_END();
3032 }
3033 else
3034 {
3035 /*
3036 * Register, memory.
3037 */
3038 IEM_MC_BEGIN(2, 2);
3039 IEM_MC_ARG(uint64_t *, pDst, 0);
3040 IEM_MC_LOCAL(uint64_t, uSrc);
3041 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3042 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3043
3044 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3045 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3046 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3047 IEM_MC_FETCH_MEM_U64(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);
3048
3049 IEM_MC_PREPARE_FPU_USAGE();
3050 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3051 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3052
3053 IEM_MC_ADVANCE_RIP();
3054 IEM_MC_END();
3055 }
3056 return VINF_SUCCESS;
3057
3058 default:
3059 return IEMOP_RAISE_INVALID_OPCODE();
3060 }
3061}
3062
3063
/** Opcode 0x0f 0x74 - pcmpeqb Pq,Qq (MMX) / pcmpeqb Vdq,Wdq (SSE2).
 *  Byte-wise compare-for-equality; dispatches to the common MMX/SSE2 worker. */
FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq__pcmpeqb_Vdq_Wdq)
{
    IEMOP_MNEMONIC("pcmpeqb");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
}
3070
3071
/** Opcode 0x0f 0x75 - pcmpeqw Pq,Qq (MMX) / pcmpeqw Vdq,Wdq (SSE2).
 *  Word-wise compare-for-equality; dispatches to the common MMX/SSE2 worker. */
FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq__pcmpeqw_Vdq_Wdq)
{
    IEMOP_MNEMONIC("pcmpeqw");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
}
3078
3079
/** Opcode 0x0f 0x76 - pcmpeqd Pq,Qq (MMX) / pcmpeqd Vdq,Wdq (SSE2).
 *  Dword-wise compare-for-equality; dispatches to the common MMX/SSE2 worker.
 *  Note: the identifier spells "pcmped" (missing 'q'); kept as-is since the
 *  opcode dispatch table references it by this name. */
FNIEMOP_DEF(iemOp_pcmped_Pq_Qq__pcmpeqd_Vdq_Wdq)
{
    IEMOP_MNEMONIC("pcmpeqd");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
}
3086
3087
/** Opcode 0x0f 0x77 - emms. Not implemented yet (asserts/stubs at runtime). */
FNIEMOP_STUB(iemOp_emms);
/** Opcode 0x0f 0x78 - vmread / AMD group 17; stubbed as invalid opcode (\#UD). */
FNIEMOP_UD_STUB(iemOp_vmread_AmdGrp17);
/** Opcode 0x0f 0x79 - vmwrite; stubbed as invalid opcode (\#UD). */
FNIEMOP_UD_STUB(iemOp_vmwrite);
/** Opcode 0x0f 0x7c - haddpd/haddps. Not implemented yet. */
FNIEMOP_STUB(iemOp_haddpd_Vdp_Wpd__haddps_Vps_Wps);
/** Opcode 0x0f 0x7d - hsubpd/hsubps. Not implemented yet. */
FNIEMOP_STUB(iemOp_hsubpd_Vpd_Wpd__hsubps_Vps_Wps);
3098
3099
/** Opcode 0x0f 0x7e - movd/movq Ed/q,Pd/q (MMX) and movd/movq Ed/q,Vd/q (SSE2).
 *
 * Stores the low 32 bits (or 64 bits with REX.W) of an MMX or XMM register to
 * a general-purpose register or to memory.  The operand-size prefix (0x66)
 * selects the XMM form; no prefix selects the MMX form; REPZ/REPNZ are
 * invalid here (\#UD).
 */
FNIEMOP_DEF(iemOp_movd_q_Ey_Pd__movd_q_Ey_Vy__movq_Vq_Wq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE */
            IEMOP_MNEMONIC("movd/q Ed/q,Wd/q");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* greg, XMM */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
                /* REX.W selects the 64-bit (movq) variant. */
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                    IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                    IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* [mem], XMM */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                    IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffSrc, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                    IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrEffSrc, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            IEMOP_MNEMONIC("movq/d Ed/q,Pd/q");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* greg, MMX */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
                /* REX.W selects the 64-bit (movq) variant. */
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                    IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                    IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* [mem], MMX */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                    IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffSrc, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                    IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrEffSrc, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
3210
3211
/** Opcode 0x0f 0x7f - movq Qq,Pq (MMX) / movdqa Wdq,Vdq (66h) / movdqu Wdq,Vdq (F3h).
 *
 * Store form: copies an MMX or XMM register to a register or memory.  The 66h
 * prefix selects the aligned SSE store (movdqa), F3h the unaligned one
 * (movdqu), no prefix the MMX movq; other prefix combinations are \#UD.
 */
FNIEMOP_DEF(iemOp_movq_Qq_Pq__movq_movdqa_Wdq_Vdq__movdqu_Wdq_Vdq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    bool fAligned = false;
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE aligned */
            fAligned = true;
            /* fall thru - aligned and unaligned share the code below. */
        case IEM_OP_PRF_REPZ: /* SSE unaligned */
            if (fAligned)
                IEMOP_MNEMONIC("movdqa Wdq,Vdq");
            else
                IEMOP_MNEMONIC("movdqu Wdq,Vdq");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 0);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
                IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB,
                                      ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint128_t, u128Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

                IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                /* movdqa enforces 16-byte alignment; movdqu does not. */
                if (fAligned)
                    IEM_MC_STORE_MEM_U128_ALIGN_SSE(pIemCpu->iEffSeg, GCPtrEffSrc, u128Tmp);
                else
                    IEM_MC_STORE_MEM_U128(pIemCpu->iEffSeg, GCPtrEffSrc, u128Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            IEMOP_MNEMONIC("movq Qq,Pq");

            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
                /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
                IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_STORE_MREG_U64(bRm & X86_MODRM_RM_MASK, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();

                IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffSrc, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
3311
3312
3313
/** Opcode 0x0f 0x80 - jo Jv.
 *  Jump near if OF is set; the displacement is 16- or 32-bit depending on the
 *  effective operand size (defaults to wide in 64-bit mode). */
FNIEMOP_DEF(iemOp_jo_Jv)
{
    IEMOP_MNEMONIC("jo Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3348
3349
/** Opcode 0x0f 0x81 - jno Jv.
 *  Jump near if OF is clear (branches are inverted: the taken path is the
 *  IEM_MC_ELSE leg). */
FNIEMOP_DEF(iemOp_jno_Jv)
{
    IEMOP_MNEMONIC("jno Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3384
3385
/** Opcode 0x0f 0x82 - jc/jb/jnae Jv.
 *  Jump near if CF is set. */
FNIEMOP_DEF(iemOp_jc_Jv)
{
    IEMOP_MNEMONIC("jc/jb/jnae Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3420
3421
/** Opcode 0x0f 0x83 - jnc/jnb/jae Jv.
 *  Jump near if CF is clear (taken path is the IEM_MC_ELSE leg). */
FNIEMOP_DEF(iemOp_jnc_Jv)
{
    IEMOP_MNEMONIC("jnc/jnb/jae Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3456
3457
/** Opcode 0x0f 0x84 - je/jz Jv.
 *  Jump near if ZF is set. */
FNIEMOP_DEF(iemOp_je_Jv)
{
    IEMOP_MNEMONIC("je/jz Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3492
3493
/** Opcode 0x0f 0x85 - jne/jnz Jv.
 *  Jump near if ZF is clear (taken path is the IEM_MC_ELSE leg). */
FNIEMOP_DEF(iemOp_jne_Jv)
{
    IEMOP_MNEMONIC("jne/jnz Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3528
3529
/** Opcode 0x0f 0x86 - jbe/jna Jv.
 *  Jump near if CF or ZF is set (unsigned below-or-equal). */
FNIEMOP_DEF(iemOp_jbe_Jv)
{
    IEMOP_MNEMONIC("jbe/jna Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3564
3565
/** Opcode 0x0f 0x87 - jnbe/ja Jv.
 *  Jump near if both CF and ZF are clear (unsigned above); taken path is the
 *  IEM_MC_ELSE leg. */
FNIEMOP_DEF(iemOp_jnbe_Jv)
{
    IEMOP_MNEMONIC("jnbe/ja Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3600
3601
/** Opcode 0x0f 0x88 - js Jv.
 *  Jump near if SF is set. */
FNIEMOP_DEF(iemOp_js_Jv)
{
    IEMOP_MNEMONIC("js Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3636
3637
/** Opcode 0x0f 0x89 - jns Jv.
 *  Jump near if SF is clear (taken path is the IEM_MC_ELSE leg). */
FNIEMOP_DEF(iemOp_jns_Jv)
{
    IEMOP_MNEMONIC("jns Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3672
3673
/** Opcode 0x0f 0x8a - jp/jpe Jv.
 *  Jump near if PF is set. */
FNIEMOP_DEF(iemOp_jp_Jv)
{
    IEMOP_MNEMONIC("jp Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3708
3709
3710/** Opcode 0x0f 0x8b. */
3711FNIEMOP_DEF(iemOp_jnp_Jv)
3712{
3713 IEMOP_MNEMONIC("jo Jv");
3714 IEMOP_HLP_MIN_386();
3715 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3716 if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
3717 {
3718 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3719 IEMOP_HLP_NO_LOCK_PREFIX();
3720
3721 IEM_MC_BEGIN(0, 0);
3722 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3723 IEM_MC_ADVANCE_RIP();
3724 } IEM_MC_ELSE() {
3725 IEM_MC_REL_JMP_S16(i16Imm);
3726 } IEM_MC_ENDIF();
3727 IEM_MC_END();
3728 }
3729 else
3730 {
3731 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3732 IEMOP_HLP_NO_LOCK_PREFIX();
3733
3734 IEM_MC_BEGIN(0, 0);
3735 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3736 IEM_MC_ADVANCE_RIP();
3737 } IEM_MC_ELSE() {
3738 IEM_MC_REL_JMP_S32(i32Imm);
3739 } IEM_MC_ENDIF();
3740 IEM_MC_END();
3741 }
3742 return VINF_SUCCESS;
3743}
3744
3745
/** Opcode 0x0f 0x8c - jl/jnge Jv.
 *  Jump near if SF != OF (signed less). */
FNIEMOP_DEF(iemOp_jl_Jv)
{
    IEMOP_MNEMONIC("jl/jnge Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3780
3781
/** Opcode 0x0f 0x8d - jnl/jge Jv.
 *  Jump near if SF == OF (signed greater-or-equal); taken path is the
 *  IEM_MC_ELSE leg. */
FNIEMOP_DEF(iemOp_jnl_Jv)
{
    IEMOP_MNEMONIC("jnl/jge Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3816
3817
/** Opcode 0x0f 0x8e - jle/jng Jv.
 *  Jump near if ZF is set or SF != OF (signed less-or-equal). */
FNIEMOP_DEF(iemOp_jle_Jv)
{
    IEMOP_MNEMONIC("jle/jng Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3852
3853
/** Opcode 0x0f 0x8f - jnle/jg Jv.
 *  Jump near if ZF is clear and SF == OF (signed greater); taken path is the
 *  IEM_MC_ELSE leg. */
FNIEMOP_DEF(iemOp_jnle_Jv)
{
    IEMOP_MNEMONIC("jnle/jg Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3888
3889
/** Opcode 0x0f 0x90 - seto Eb.
 *  Set the byte register/memory operand to 1 if OF is set, else 0. */
FNIEMOP_DEF(iemOp_seto_Eb)
{
    IEMOP_MNEMONIC("seto Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3929
3930
/** Opcode 0x0f 0x91 - setno Eb.
 *  Set the byte register/memory operand to 1 if OF is clear, else 0
 *  (the stored constants are inverted relative to seto). */
FNIEMOP_DEF(iemOp_setno_Eb)
{
    IEMOP_MNEMONIC("setno Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3970
3971
/** Opcode 0x0f 0x92 - setc/setb/setnae Eb.
 *  Set the byte register/memory operand to 1 if CF is set, else 0. */
FNIEMOP_DEF(iemOp_setc_Eb)
{
    IEMOP_MNEMONIC("setc Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4011
4012
/** Opcode 0x0f 0x93 - setnc/setnb/setae Eb.
 *  Set the byte register/memory operand to 1 if CF is clear, else 0
 *  (the stored constants are inverted relative to setc). */
FNIEMOP_DEF(iemOp_setnc_Eb)
{
    IEMOP_MNEMONIC("setnc Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4052
4053
/** Opcode 0x0f 0x94 - sete/setz Eb.
 *  Set the byte register/memory operand to 1 if ZF is set, else 0. */
FNIEMOP_DEF(iemOp_sete_Eb)
{
    IEMOP_MNEMONIC("sete Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4093
4094
/** Opcode 0x0f 0x95 - setne/setnz Eb.
 *  Set the byte register/memory operand to 1 if ZF is clear, else 0
 *  (the stored constants are inverted relative to sete). */
FNIEMOP_DEF(iemOp_setne_Eb)
{
    IEMOP_MNEMONIC("setne Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4134
4135
4136/** Opcode 0x0f 0x96. */
4137FNIEMOP_DEF(iemOp_setbe_Eb)
4138{
4139 IEMOP_MNEMONIC("setbe Eb");
4140 IEMOP_HLP_MIN_386();
4141 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4142 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */
4143
4144 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4145 * any way. AMD says it's "unused", whatever that means. We're
4146 * ignoring for now. */
4147 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4148 {
4149 /* register target */
4150 IEM_MC_BEGIN(0, 0);
4151 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4152 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
4153 } IEM_MC_ELSE() {
4154 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
4155 } IEM_MC_ENDIF();
4156 IEM_MC_ADVANCE_RIP();
4157 IEM_MC_END();
4158 }
4159 else
4160 {
4161 /* memory target */
4162 IEM_MC_BEGIN(0, 1);
4163 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4164 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4165 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4166 IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
4167 } IEM_MC_ELSE() {
4168 IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
4169 } IEM_MC_ENDIF();
4170 IEM_MC_ADVANCE_RIP();
4171 IEM_MC_END();
4172 }
4173 return VINF_SUCCESS;
4174}
4175
4176
4177/** Opcode 0x0f 0x97. */
4178FNIEMOP_DEF(iemOp_setnbe_Eb)
4179{
4180 IEMOP_MNEMONIC("setnbe Eb");
4181 IEMOP_HLP_MIN_386();
4182 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4183 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */
4184
4185 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4186 * any way. AMD says it's "unused", whatever that means. We're
4187 * ignoring for now. */
4188 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4189 {
4190 /* register target */
4191 IEM_MC_BEGIN(0, 0);
4192 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4193 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
4194 } IEM_MC_ELSE() {
4195 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
4196 } IEM_MC_ENDIF();
4197 IEM_MC_ADVANCE_RIP();
4198 IEM_MC_END();
4199 }
4200 else
4201 {
4202 /* memory target */
4203 IEM_MC_BEGIN(0, 1);
4204 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4205 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4206 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4207 IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
4208 } IEM_MC_ELSE() {
4209 IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
4210 } IEM_MC_ENDIF();
4211 IEM_MC_ADVANCE_RIP();
4212 IEM_MC_END();
4213 }
4214 return VINF_SUCCESS;
4215}
4216
4217
4218/** Opcode 0x0f 0x98. */
4219FNIEMOP_DEF(iemOp_sets_Eb)
4220{
4221 IEMOP_MNEMONIC("sets Eb");
4222 IEMOP_HLP_MIN_386();
4223 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4224 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */
4225
4226 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4227 * any way. AMD says it's "unused", whatever that means. We're
4228 * ignoring for now. */
4229 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4230 {
4231 /* register target */
4232 IEM_MC_BEGIN(0, 0);
4233 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4234 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
4235 } IEM_MC_ELSE() {
4236 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
4237 } IEM_MC_ENDIF();
4238 IEM_MC_ADVANCE_RIP();
4239 IEM_MC_END();
4240 }
4241 else
4242 {
4243 /* memory target */
4244 IEM_MC_BEGIN(0, 1);
4245 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4246 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4247 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4248 IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
4249 } IEM_MC_ELSE() {
4250 IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
4251 } IEM_MC_ENDIF();
4252 IEM_MC_ADVANCE_RIP();
4253 IEM_MC_END();
4254 }
4255 return VINF_SUCCESS;
4256}
4257
4258
4259/** Opcode 0x0f 0x99. */
4260FNIEMOP_DEF(iemOp_setns_Eb)
4261{
4262 IEMOP_MNEMONIC("setns Eb");
4263 IEMOP_HLP_MIN_386();
4264 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4265 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */
4266
4267 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4268 * any way. AMD says it's "unused", whatever that means. We're
4269 * ignoring for now. */
4270 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4271 {
4272 /* register target */
4273 IEM_MC_BEGIN(0, 0);
4274 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4275 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
4276 } IEM_MC_ELSE() {
4277 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
4278 } IEM_MC_ENDIF();
4279 IEM_MC_ADVANCE_RIP();
4280 IEM_MC_END();
4281 }
4282 else
4283 {
4284 /* memory target */
4285 IEM_MC_BEGIN(0, 1);
4286 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4287 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4288 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4289 IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
4290 } IEM_MC_ELSE() {
4291 IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
4292 } IEM_MC_ENDIF();
4293 IEM_MC_ADVANCE_RIP();
4294 IEM_MC_END();
4295 }
4296 return VINF_SUCCESS;
4297}
4298
4299
4300/** Opcode 0x0f 0x9a. */
4301FNIEMOP_DEF(iemOp_setp_Eb)
4302{
4303 IEMOP_MNEMONIC("setnp Eb");
4304 IEMOP_HLP_MIN_386();
4305 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4306 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */
4307
4308 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4309 * any way. AMD says it's "unused", whatever that means. We're
4310 * ignoring for now. */
4311 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4312 {
4313 /* register target */
4314 IEM_MC_BEGIN(0, 0);
4315 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4316 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
4317 } IEM_MC_ELSE() {
4318 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
4319 } IEM_MC_ENDIF();
4320 IEM_MC_ADVANCE_RIP();
4321 IEM_MC_END();
4322 }
4323 else
4324 {
4325 /* memory target */
4326 IEM_MC_BEGIN(0, 1);
4327 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4328 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4329 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4330 IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
4331 } IEM_MC_ELSE() {
4332 IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
4333 } IEM_MC_ENDIF();
4334 IEM_MC_ADVANCE_RIP();
4335 IEM_MC_END();
4336 }
4337 return VINF_SUCCESS;
4338}
4339
4340
4341/** Opcode 0x0f 0x9b. */
4342FNIEMOP_DEF(iemOp_setnp_Eb)
4343{
4344 IEMOP_MNEMONIC("setnp Eb");
4345 IEMOP_HLP_MIN_386();
4346 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4347 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */
4348
4349 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4350 * any way. AMD says it's "unused", whatever that means. We're
4351 * ignoring for now. */
4352 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4353 {
4354 /* register target */
4355 IEM_MC_BEGIN(0, 0);
4356 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4357 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
4358 } IEM_MC_ELSE() {
4359 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
4360 } IEM_MC_ENDIF();
4361 IEM_MC_ADVANCE_RIP();
4362 IEM_MC_END();
4363 }
4364 else
4365 {
4366 /* memory target */
4367 IEM_MC_BEGIN(0, 1);
4368 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4369 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4370 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4371 IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
4372 } IEM_MC_ELSE() {
4373 IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
4374 } IEM_MC_ENDIF();
4375 IEM_MC_ADVANCE_RIP();
4376 IEM_MC_END();
4377 }
4378 return VINF_SUCCESS;
4379}
4380
4381
4382/** Opcode 0x0f 0x9c. */
4383FNIEMOP_DEF(iemOp_setl_Eb)
4384{
4385 IEMOP_MNEMONIC("setl Eb");
4386 IEMOP_HLP_MIN_386();
4387 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4388 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */
4389
4390 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4391 * any way. AMD says it's "unused", whatever that means. We're
4392 * ignoring for now. */
4393 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4394 {
4395 /* register target */
4396 IEM_MC_BEGIN(0, 0);
4397 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4398 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
4399 } IEM_MC_ELSE() {
4400 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
4401 } IEM_MC_ENDIF();
4402 IEM_MC_ADVANCE_RIP();
4403 IEM_MC_END();
4404 }
4405 else
4406 {
4407 /* memory target */
4408 IEM_MC_BEGIN(0, 1);
4409 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4410 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4411 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4412 IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
4413 } IEM_MC_ELSE() {
4414 IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
4415 } IEM_MC_ENDIF();
4416 IEM_MC_ADVANCE_RIP();
4417 IEM_MC_END();
4418 }
4419 return VINF_SUCCESS;
4420}
4421
4422
4423/** Opcode 0x0f 0x9d. */
4424FNIEMOP_DEF(iemOp_setnl_Eb)
4425{
4426 IEMOP_MNEMONIC("setnl Eb");
4427 IEMOP_HLP_MIN_386();
4428 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4429 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */
4430
4431 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4432 * any way. AMD says it's "unused", whatever that means. We're
4433 * ignoring for now. */
4434 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4435 {
4436 /* register target */
4437 IEM_MC_BEGIN(0, 0);
4438 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4439 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
4440 } IEM_MC_ELSE() {
4441 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
4442 } IEM_MC_ENDIF();
4443 IEM_MC_ADVANCE_RIP();
4444 IEM_MC_END();
4445 }
4446 else
4447 {
4448 /* memory target */
4449 IEM_MC_BEGIN(0, 1);
4450 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4451 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4452 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4453 IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
4454 } IEM_MC_ELSE() {
4455 IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
4456 } IEM_MC_ENDIF();
4457 IEM_MC_ADVANCE_RIP();
4458 IEM_MC_END();
4459 }
4460 return VINF_SUCCESS;
4461}
4462
4463
4464/** Opcode 0x0f 0x9e. */
4465FNIEMOP_DEF(iemOp_setle_Eb)
4466{
4467 IEMOP_MNEMONIC("setle Eb");
4468 IEMOP_HLP_MIN_386();
4469 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4470 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */
4471
4472 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4473 * any way. AMD says it's "unused", whatever that means. We're
4474 * ignoring for now. */
4475 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4476 {
4477 /* register target */
4478 IEM_MC_BEGIN(0, 0);
4479 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4480 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
4481 } IEM_MC_ELSE() {
4482 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
4483 } IEM_MC_ENDIF();
4484 IEM_MC_ADVANCE_RIP();
4485 IEM_MC_END();
4486 }
4487 else
4488 {
4489 /* memory target */
4490 IEM_MC_BEGIN(0, 1);
4491 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4492 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4493 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4494 IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
4495 } IEM_MC_ELSE() {
4496 IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
4497 } IEM_MC_ENDIF();
4498 IEM_MC_ADVANCE_RIP();
4499 IEM_MC_END();
4500 }
4501 return VINF_SUCCESS;
4502}
4503
4504
4505/** Opcode 0x0f 0x9f. */
4506FNIEMOP_DEF(iemOp_setnle_Eb)
4507{
4508 IEMOP_MNEMONIC("setnle Eb");
4509 IEMOP_HLP_MIN_386();
4510 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4511 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */
4512
4513 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4514 * any way. AMD says it's "unused", whatever that means. We're
4515 * ignoring for now. */
4516 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4517 {
4518 /* register target */
4519 IEM_MC_BEGIN(0, 0);
4520 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4521 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
4522 } IEM_MC_ELSE() {
4523 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
4524 } IEM_MC_ENDIF();
4525 IEM_MC_ADVANCE_RIP();
4526 IEM_MC_END();
4527 }
4528 else
4529 {
4530 /* memory target */
4531 IEM_MC_BEGIN(0, 1);
4532 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4533 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4534 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4535 IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
4536 } IEM_MC_ELSE() {
4537 IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
4538 } IEM_MC_ENDIF();
4539 IEM_MC_ADVANCE_RIP();
4540 IEM_MC_END();
4541 }
4542 return VINF_SUCCESS;
4543}
4544
4545
4546/**
4547 * Common 'push segment-register' helper.
4548 */
4549FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
4550{
4551 IEMOP_HLP_NO_LOCK_PREFIX();
4552 if (iReg < X86_SREG_FS)
4553 IEMOP_HLP_NO_64BIT();
4554 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4555
4556 switch (pIemCpu->enmEffOpSize)
4557 {
4558 case IEMMODE_16BIT:
4559 IEM_MC_BEGIN(0, 1);
4560 IEM_MC_LOCAL(uint16_t, u16Value);
4561 IEM_MC_FETCH_SREG_U16(u16Value, iReg);
4562 IEM_MC_PUSH_U16(u16Value);
4563 IEM_MC_ADVANCE_RIP();
4564 IEM_MC_END();
4565 break;
4566
4567 case IEMMODE_32BIT:
4568 IEM_MC_BEGIN(0, 1);
4569 IEM_MC_LOCAL(uint32_t, u32Value);
4570 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
4571 IEM_MC_PUSH_U32_SREG(u32Value);
4572 IEM_MC_ADVANCE_RIP();
4573 IEM_MC_END();
4574 break;
4575
4576 case IEMMODE_64BIT:
4577 IEM_MC_BEGIN(0, 1);
4578 IEM_MC_LOCAL(uint64_t, u64Value);
4579 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
4580 IEM_MC_PUSH_U64(u64Value);
4581 IEM_MC_ADVANCE_RIP();
4582 IEM_MC_END();
4583 break;
4584 }
4585
4586 return VINF_SUCCESS;
4587}
4588
4589
/** Opcode 0x0f 0xa0. */
FNIEMOP_DEF(iemOp_push_fs)
{
    IEMOP_MNEMONIC("push fs");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* Delegate to the common segment-register push worker. */
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
}
4598
4599
/** Opcode 0x0f 0xa1. */
FNIEMOP_DEF(iemOp_pop_fs)
{
    IEMOP_MNEMONIC("pop fs");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* Segment loads have side effects (descriptor fetch, checks), so this
       is handled by a C implementation rather than micro-code. */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pIemCpu->enmEffOpSize);
}
4608
4609
/** Opcode 0x0f 0xa2. */
FNIEMOP_DEF(iemOp_cpuid)
{
    IEMOP_MNEMONIC("cpuid");
    IEMOP_HLP_MIN_486(); /* not all 486es. */
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* CPUID is implemented entirely in C. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
}
4618
4619
4620/**
4621 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
4622 * iemOp_bts_Ev_Gv.
4623 */
4624FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
4625{
4626 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4627 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
4628
4629 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4630 {
4631 /* register destination. */
4632 IEMOP_HLP_NO_LOCK_PREFIX();
4633 switch (pIemCpu->enmEffOpSize)
4634 {
4635 case IEMMODE_16BIT:
4636 IEM_MC_BEGIN(3, 0);
4637 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
4638 IEM_MC_ARG(uint16_t, u16Src, 1);
4639 IEM_MC_ARG(uint32_t *, pEFlags, 2);
4640
4641 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4642 IEM_MC_AND_LOCAL_U16(u16Src, 0xf);
4643 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
4644 IEM_MC_REF_EFLAGS(pEFlags);
4645 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
4646
4647 IEM_MC_ADVANCE_RIP();
4648 IEM_MC_END();
4649 return VINF_SUCCESS;
4650
4651 case IEMMODE_32BIT:
4652 IEM_MC_BEGIN(3, 0);
4653 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
4654 IEM_MC_ARG(uint32_t, u32Src, 1);
4655 IEM_MC_ARG(uint32_t *, pEFlags, 2);
4656
4657 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4658 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
4659 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
4660 IEM_MC_REF_EFLAGS(pEFlags);
4661 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
4662
4663 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
4664 IEM_MC_ADVANCE_RIP();
4665 IEM_MC_END();
4666 return VINF_SUCCESS;
4667
4668 case IEMMODE_64BIT:
4669 IEM_MC_BEGIN(3, 0);
4670 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
4671 IEM_MC_ARG(uint64_t, u64Src, 1);
4672 IEM_MC_ARG(uint32_t *, pEFlags, 2);
4673
4674 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4675 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
4676 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
4677 IEM_MC_REF_EFLAGS(pEFlags);
4678 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
4679
4680 IEM_MC_ADVANCE_RIP();
4681 IEM_MC_END();
4682 return VINF_SUCCESS;
4683
4684 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4685 }
4686 }
4687 else
4688 {
4689 /* memory destination. */
4690
4691 uint32_t fAccess;
4692 if (pImpl->pfnLockedU16)
4693 fAccess = IEM_ACCESS_DATA_RW;
4694 else /* BT */
4695 {
4696 IEMOP_HLP_NO_LOCK_PREFIX();
4697 fAccess = IEM_ACCESS_DATA_R;
4698 }
4699
4700 NOREF(fAccess);
4701
4702 /** @todo test negative bit offsets! */
4703 switch (pIemCpu->enmEffOpSize)
4704 {
4705 case IEMMODE_16BIT:
4706 IEM_MC_BEGIN(3, 2);
4707 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
4708 IEM_MC_ARG(uint16_t, u16Src, 1);
4709 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
4710 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4711 IEM_MC_LOCAL(int16_t, i16AddrAdj);
4712
4713 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4714 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4715 IEM_MC_ASSIGN(i16AddrAdj, u16Src);
4716 IEM_MC_AND_ARG_U16(u16Src, 0x0f);
4717 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
4718 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 1);
4719 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
4720 IEM_MC_FETCH_EFLAGS(EFlags);
4721
4722 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
4723 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
4724 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
4725 else
4726 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
4727 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
4728
4729 IEM_MC_COMMIT_EFLAGS(EFlags);
4730 IEM_MC_ADVANCE_RIP();
4731 IEM_MC_END();
4732 return VINF_SUCCESS;
4733
4734 case IEMMODE_32BIT:
4735 IEM_MC_BEGIN(3, 2);
4736 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
4737 IEM_MC_ARG(uint32_t, u32Src, 1);
4738 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
4739 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4740 IEM_MC_LOCAL(int32_t, i32AddrAdj);
4741
4742 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4743 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4744 IEM_MC_ASSIGN(i32AddrAdj, u32Src);
4745 IEM_MC_AND_ARG_U32(u32Src, 0x1f);
4746 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
4747 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
4748 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
4749 IEM_MC_FETCH_EFLAGS(EFlags);
4750
4751 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
4752 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
4753 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
4754 else
4755 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
4756 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
4757
4758 IEM_MC_COMMIT_EFLAGS(EFlags);
4759 IEM_MC_ADVANCE_RIP();
4760 IEM_MC_END();
4761 return VINF_SUCCESS;
4762
4763 case IEMMODE_64BIT:
4764 IEM_MC_BEGIN(3, 2);
4765 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
4766 IEM_MC_ARG(uint64_t, u64Src, 1);
4767 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
4768 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4769 IEM_MC_LOCAL(int64_t, i64AddrAdj);
4770
4771 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4772 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4773 IEM_MC_ASSIGN(i64AddrAdj, u64Src);
4774 IEM_MC_AND_ARG_U64(u64Src, 0x3f);
4775 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
4776 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
4777 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
4778 IEM_MC_FETCH_EFLAGS(EFlags);
4779
4780 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
4781 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
4782 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
4783 else
4784 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
4785 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
4786
4787 IEM_MC_COMMIT_EFLAGS(EFlags);
4788 IEM_MC_ADVANCE_RIP();
4789 IEM_MC_END();
4790 return VINF_SUCCESS;
4791
4792 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4793 }
4794 }
4795}
4796
4797
4798/** Opcode 0x0f 0xa3. */
4799FNIEMOP_DEF(iemOp_bt_Ev_Gv)
4800{
4801 IEMOP_MNEMONIC("bt Gv,Gv");
4802 IEMOP_HLP_MIN_386();
4803 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
4804}
4805
4806
/**
 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
 *
 * Double-precision shift with an immediate shift count: Ev receives bits
 * shifted in from Gv, count taken from the imm8 that follows ModR/M (and any
 * SIB/displacement).  AF and OF are left undefined for verification purposes.
 *
 * @param   pImpl   The shld/shrd implementation table (16/32/64-bit workers).
 */
FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register destination: the imm8 follows ModR/M directly. */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEMOP_HLP_NO_LOCK_PREFIX();

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                /* 32-bit GPR writes zero the upper half in 64-bit mode. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory destination.  Note: the '1' passed to CALC_RM_EFF_ADDR tells
           the address calculation that one immediate byte still follows, and
           the imm8 is only fetched after the effective address. */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
4951
4952
/**
 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
 *
 * Double-precision shift with the count taken from CL: Ev receives bits
 * shifted in from Gv.  AF and OF are left undefined for verification
 * purposes.
 *
 * @param   pImpl   The shld/shrd implementation table (16/32/64-bit workers).
 */
FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register destination. */
        IEMOP_HLP_NO_LOCK_PREFIX();

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* shift count = CL */
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* shift count = CL */
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                /* 32-bit GPR writes zero the upper half in 64-bit mode. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* shift count = CL */
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory destination. */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* shift count = CL */
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* shift count = CL */
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* shift count = CL */
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
5096
5097
5098
/** Opcode 0x0f 0xa4 - SHLD Ev,Gv,Ib (double precision shift left, immediate count). */
FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
{
    IEMOP_MNEMONIC("shld Ev,Gv,Ib");
    IEMOP_HLP_MIN_386(); /* SHLD first appeared on the 80386. */
    /* Shared Ib-count worker does the decoding; only the implementation table differs from SHRD. */
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shld);
}
5106
5107
/** Opcode 0x0f 0xa5 - SHLD Ev,Gv,CL (double precision shift left, count in CL). */
FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
{
    IEMOP_MNEMONIC("shld Ev,Gv,CL");
    IEMOP_HLP_MIN_386(); /* SHLD first appeared on the 80386. */
    /* Shared CL-count worker does the decoding; only the implementation table differs from SHRD. */
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shld);
}
5115
5116
/** Opcode 0x0f 0xa8 - PUSH GS. */
FNIEMOP_DEF(iemOp_push_gs)
{
    IEMOP_MNEMONIC("push gs");
    IEMOP_HLP_MIN_386(); /* Two-byte 0x0f opcodes for GS require a 386. */
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* Common segment-register push worker, parameterized by segment index. */
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
}
5125
5126
/** Opcode 0x0f 0xa9 - POP GS. */
FNIEMOP_DEF(iemOp_pop_gs)
{
    IEMOP_MNEMONIC("pop gs");
    IEMOP_HLP_MIN_386(); /* Two-byte 0x0f opcodes for GS require a 386. */
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* Segment loads can fault/raise, so this is deferred to a C implementation. */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pIemCpu->enmEffOpSize);
}
5135
5136
5137/** Opcode 0x0f 0xaa. */
5138FNIEMOP_STUB(iemOp_rsm);
5139//IEMOP_HLP_MIN_386();
5140
5141
/** Opcode 0x0f 0xab - BTS Ev,Gv (bit test and set). */
FNIEMOP_DEF(iemOp_bts_Ev_Gv)
{
    IEMOP_MNEMONIC("bts Ev,Gv");
    IEMOP_HLP_MIN_386(); /* BTS first appeared on the 80386. */
    /* Shared bit-op worker; only the implementation table differs between BT/BTS/BTR/BTC. */
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
}
5149
5150
/** Opcode 0x0f 0xac - SHRD Ev,Gv,Ib (double precision shift right, immediate count). */
FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
{
    IEMOP_MNEMONIC("shrd Ev,Gv,Ib");
    IEMOP_HLP_MIN_386(); /* SHRD first appeared on the 80386. */
    /* Shared Ib-count worker does the decoding; only the implementation table differs from SHLD. */
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shrd);
}
5158
5159
/** Opcode 0x0f 0xad - SHRD Ev,Gv,CL (double precision shift right, count in CL). */
FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
{
    IEMOP_MNEMONIC("shrd Ev,Gv,CL");
    IEMOP_HLP_MIN_386(); /* SHRD first appeared on the 80386. */
    /* Shared CL-count worker does the decoding; only the implementation table differs from SHLD. */
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shrd);
}
5167
5168
/**
 * Opcode 0x0f 0xae mem/0 - FXSAVE m512.
 *
 * Raises \#UD when the guest CPU profile lacks FXSAVE/FXRSTOR support,
 * otherwise defers to the C implementation with the effective segment,
 * effective address and operand size (the latter matters for the
 * FPU-IP/DP image format).
 */
FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fxsave m512");
    if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fFxSaveRstor)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t,  iEffSeg,                                 0);
    IEM_MC_ARG(RTGCPTR,  GCPtrEff,                                1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* After effective address calc - prefixes are all consumed. */
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
    IEM_MC_END();
    return VINF_SUCCESS;
}
5187
5188
/**
 * Opcode 0x0f 0xae mem/1 - FXRSTOR m512.
 *
 * Mirror image of iemOp_Grp15_fxsave: \#UD without FXSAVE/FXRSTOR support,
 * otherwise defers to the C implementation.
 */
FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fxrstor m512");
    if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fFxSaveRstor)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t,  iEffSeg,                                 0);
    IEM_MC_ARG(RTGCPTR,  GCPtrEff,                                1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* After effective address calc - prefixes are all consumed. */
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
    IEM_MC_END();
    return VINF_SUCCESS;
}
5207
5208
5209/** Opcode 0x0f 0xae mem/2. */
5210FNIEMOP_STUB_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm);
5211
5212/** Opcode 0x0f 0xae mem/3. */
5213FNIEMOP_STUB_1(iemOp_Grp15_stmxcsr, uint8_t, bRm);
5214
5215/** Opcode 0x0f 0xae mem/4. */
5216FNIEMOP_UD_STUB_1(iemOp_Grp15_xsave, uint8_t, bRm);
5217
5218/** Opcode 0x0f 0xae mem/5. */
5219FNIEMOP_UD_STUB_1(iemOp_Grp15_xrstor, uint8_t, bRm);
5220
5221/** Opcode 0x0f 0xae mem/6. */
5222FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);
5223
5224/** Opcode 0x0f 0xae mem/7. */
5225FNIEMOP_STUB_1(iemOp_Grp15_clflush, uint8_t, bRm);
5226
5227
/**
 * Opcode 0x0f 0xae 11b/5 - LFENCE.
 *
 * \#UD if the guest profile lacks SSE2.  Uses the real LFENCE when the host
 * CPU has SSE2, otherwise falls back to an alternative memory fence helper.
 */
FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
{
    IEMOP_MNEMONIC("lfence");
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fSse2)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(0, 0);
    if (IEM_GET_HOST_CPU_FEATURES(pIemCpu)->fSse2) /* Host capability, not guest. */
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
5245
5246
/**
 * Opcode 0x0f 0xae 11b/6 - MFENCE.
 *
 * \#UD if the guest profile lacks SSE2.  Uses the real MFENCE when the host
 * CPU has SSE2, otherwise falls back to an alternative memory fence helper.
 */
FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
{
    IEMOP_MNEMONIC("mfence");
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fSse2)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(0, 0);
    if (IEM_GET_HOST_CPU_FEATURES(pIemCpu)->fSse2) /* Host capability, not guest. */
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
5264
5265
/**
 * Opcode 0x0f 0xae 11b/7 - SFENCE.
 *
 * NOTE(review): the guest gate checks fSse2, like LFENCE/MFENCE, but SFENCE
 * was architecturally introduced with SSE - confirm whether fSse should
 * suffice here.
 */
FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
{
    IEMOP_MNEMONIC("sfence");
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fSse2)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(0, 0);
    if (IEM_GET_HOST_CPU_FEATURES(pIemCpu)->fSse2) /* Host capability, not guest. */
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
5283
5284
5285/** Opcode 0xf3 0x0f 0xae 11b/0. */
5286FNIEMOP_UD_STUB_1(iemOp_Grp15_rdfsbase, uint8_t, bRm);
5287
5288/** Opcode 0xf3 0x0f 0xae 11b/1. */
5289FNIEMOP_UD_STUB_1(iemOp_Grp15_rdgsbase, uint8_t, bRm);
5290
5291/** Opcode 0xf3 0x0f 0xae 11b/2. */
5292FNIEMOP_UD_STUB_1(iemOp_Grp15_wrfsbase, uint8_t, bRm);
5293
5294/** Opcode 0xf3 0x0f 0xae 11b/3. */
5295FNIEMOP_UD_STUB_1(iemOp_Grp15_wrgsbase, uint8_t, bRm);
5296
5297
/**
 * Opcode 0x0f 0xae - group 15 dispatcher.
 *
 * Memory forms (mod != 3) dispatch on the reg field alone
 * (fxsave/fxrstor/ldmxcsr/stmxcsr/xsave/xrstor/xsaveopt/clflush).
 * Register forms (mod == 3) additionally dispatch on the repz/repnz/opsize/
 * lock prefix combination: no prefix selects the fence instructions,
 * F3 (REPZ) selects rd/wr fs/gs base, anything else is \#UD.
 */
FNIEMOP_DEF(iemOp_Grp15)
{
    IEMOP_HLP_MIN_586(); /* Not entirely accurate nor needed, but useful for debugging 286 code. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Memory operand forms - dispatch on reg. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_Grp15_fxsave,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_Grp15_fxrstor, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_Grp15_ldmxcsr, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_Grp15_stmxcsr, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_Grp15_xsave,   bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_Grp15_xrstor,  bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_Grp15_xsaveopt,bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_Grp15_clflush, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Register operand forms - prefix combination selects the sub-table. */
        switch (pIemCpu->fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_LOCK))
        {
            case 0: /* No relevant prefixes: fence instructions on reg 5..7. */
                switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
                {
                    case 0: return IEMOP_RAISE_INVALID_OPCODE();
                    case 1: return IEMOP_RAISE_INVALID_OPCODE();
                    case 2: return IEMOP_RAISE_INVALID_OPCODE();
                    case 3: return IEMOP_RAISE_INVALID_OPCODE();
                    case 4: return IEMOP_RAISE_INVALID_OPCODE();
                    case 5: return FNIEMOP_CALL_1(iemOp_Grp15_lfence, bRm);
                    case 6: return FNIEMOP_CALL_1(iemOp_Grp15_mfence, bRm);
                    case 7: return FNIEMOP_CALL_1(iemOp_Grp15_sfence, bRm);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* Not reached - every case above returns. */

            case IEM_OP_PRF_REPZ: /* F3 prefix: fs/gs base access on reg 0..3. */
                switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
                {
                    case 0: return FNIEMOP_CALL_1(iemOp_Grp15_rdfsbase, bRm);
                    case 1: return FNIEMOP_CALL_1(iemOp_Grp15_rdgsbase, bRm);
                    case 2: return FNIEMOP_CALL_1(iemOp_Grp15_wrfsbase, bRm);
                    case 3: return FNIEMOP_CALL_1(iemOp_Grp15_wrgsbase, bRm);
                    case 4: return IEMOP_RAISE_INVALID_OPCODE();
                    case 5: return IEMOP_RAISE_INVALID_OPCODE();
                    case 6: return IEMOP_RAISE_INVALID_OPCODE();
                    case 7: return IEMOP_RAISE_INVALID_OPCODE();
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* Not reached - every case above returns. */

            default:
                return IEMOP_RAISE_INVALID_OPCODE();
        }
    }
}
5357
5358
/** Opcode 0x0f 0xaf - IMUL Gv,Ev (two operand form). */
FNIEMOP_DEF(iemOp_imul_Gv_Ev)
{
    IEMOP_MNEMONIC("imul Gv,Ev");
    IEMOP_HLP_MIN_386(); /* The two-operand IMUL form first appeared on the 80386. */
    /* SF/ZF/AF/PF are architecturally undefined after IMUL; tell the verifier not to compare them. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_imul_two);
}
5367
5368
/**
 * Opcode 0x0f 0xb0 - CMPXCHG Eb,Gb.
 *
 * Register and memory destination forms; the LOCK prefix selects the locked
 * assembly worker.  AL is passed by reference so the worker can update it on
 * a compare mismatch; the memory form copies AL into a local first and
 * stores it back unconditionally after the worker returns.
 */
FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
{
    IEMOP_MNEMONIC("cmpxchg Eb,Gb");
    IEMOP_HLP_MIN_486(); /* CMPXCHG first appeared on the 80486. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register destination. */
        IEMOP_HLP_DONE_DECODING();
        IEM_MC_BEGIN(4, 0);
        IEM_MC_ARG(uint8_t *,       pu8Dst,                 0);
        IEM_MC_ARG(uint8_t *,       pu8Al,                  1);
        IEM_MC_ARG(uint8_t,         u8Src,                  2);
        IEM_MC_ARG(uint32_t *,      pEFlags,                3);

        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* Memory destination - map RW, run the worker, then commit memory,
           EFLAGS and the (possibly updated) AL value. */
        IEM_MC_BEGIN(4, 3);
        IEM_MC_ARG(uint8_t *,       pu8Dst,                 0);
        IEM_MC_ARG(uint8_t *,       pu8Al,                  1);
        IEM_MC_ARG(uint8_t,         u8Src,                  2);
        IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        3);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(uint8_t, u8Al);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING();
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_REF_LOCAL(pu8Al, u8Al);
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5427
/**
 * Opcode 0x0f 0xb1 - CMPXCHG Ev,Gv.
 *
 * Register and memory destination forms for all three effective operand
 * sizes; the LOCK prefix selects the locked assembly worker.  The
 * accumulator (AX/EAX/RAX) is passed by reference so the worker can update
 * it on a compare mismatch.  On 32-bit hosts (RT_ARCH_X86) the 64-bit
 * source is passed by reference instead of by value.
 */
FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
{
    IEMOP_MNEMONIC("cmpxchg Ev,Gv");
    IEMOP_HLP_MIN_486(); /* CMPXCHG first appeared on the 80486. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register destination. */
        IEMOP_HLP_DONE_DECODING();
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                0);
                IEM_MC_ARG(uint16_t *,      pu16Ax,                 1);
                IEM_MC_ARG(uint16_t,        u16Src,                 2);
                IEM_MC_ARG(uint32_t *,      pEFlags,                3);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                0);
                IEM_MC_ARG(uint32_t *,      pu32Eax,                1);
                IEM_MC_ARG(uint32_t,        u32Src,                 2);
                IEM_MC_ARG(uint32_t *,      pEFlags,                3);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);

                /* 32-bit register writes clear the upper halves in 64-bit mode;
                   both the destination and the accumulator may have been written. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                0);
                IEM_MC_ARG(uint64_t *,      pu64Rax,                1);
#ifdef RT_ARCH_X86
                IEM_MC_ARG(uint64_t *,      pu64Src,                2);
#else
                IEM_MC_ARG(uint64_t,        u64Src,                 2);
#endif
                IEM_MC_ARG(uint32_t *,      pEFlags,                3);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
#ifdef RT_ARCH_X86
                IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
#else
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
#endif

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory destination - map RW, run the worker, then commit memory,
           EFLAGS and the (possibly updated) accumulator. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                0);
                IEM_MC_ARG(uint16_t *,      pu16Ax,                 1);
                IEM_MC_ARG(uint16_t,        u16Src,                 2);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint16_t, u16Ax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                0);
                IEM_MC_ARG(uint32_t *,      pu32Eax,                1);
                IEM_MC_ARG(uint32_t,        u32Src,                 2);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint32_t, u32Eax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax); /* U32 store zero-extends, clearing the upper half. */
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                0);
                IEM_MC_ARG(uint64_t *,      pu64Rax,                1);
#ifdef RT_ARCH_X86
                IEM_MC_ARG(uint64_t *,      pu64Src,                2);
#else
                IEM_MC_ARG(uint64_t,        u64Src,                 2);
#endif
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint64_t, u64Rax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
#ifdef RT_ARCH_X86
                IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
#else
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
#endif

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
5621
5622
/**
 * Common worker for LSS/LFS/LGS: loads a far pointer (offset + selector)
 * from memory into a general register and a segment register.
 *
 * The offset is read at the effective address and the 16-bit selector just
 * past it (at displacement 2/4/8 depending on operand size).  The actual
 * register loads are deferred to iemCImpl_load_SReg_Greg since segment
 * loads can fault.
 *
 * @param   iSegReg     The segment register to load (X86_SREG_XXX).
 * @param   bRm         The ModR/M byte (caller has verified mod != 3).
 */
FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
{
    Assert((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)); /* Caller checks this */
    uint8_t const iGReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg;

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t,        uSel,                                    0);
            IEM_MC_ARG(uint16_t,        offSeg,                                  1);
            IEM_MC_ARG_CONST(uint8_t,   iSegRegArg,/*=*/iSegReg,                 2);
            IEM_MC_ARG_CONST(uint8_t,   iGRegArg,  /*=*/iGReg,                   3);
            IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16(offSeg, pIemCpu->iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pIemCpu->iEffSeg, GCPtrEff, 2); /* Selector follows the 16-bit offset. */
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t,        uSel,                                    0);
            IEM_MC_ARG(uint32_t,        offSeg,                                  1);
            IEM_MC_ARG_CONST(uint8_t,   iSegRegArg,/*=*/iSegReg,                 2);
            IEM_MC_ARG_CONST(uint8_t,   iGRegArg,  /*=*/iGReg,                   3);
            IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32(offSeg, pIemCpu->iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pIemCpu->iEffSeg, GCPtrEff, 4); /* Selector follows the 32-bit offset. */
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t,        uSel,                                    0);
            IEM_MC_ARG(uint64_t,        offSeg,                                  1);
            IEM_MC_ARG_CONST(uint8_t,   iSegRegArg,/*=*/iSegReg,                 2);
            IEM_MC_ARG_CONST(uint8_t,   iGRegArg,  /*=*/iGReg,                   3);
            IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            if (IEM_IS_GUEST_CPU_AMD(pIemCpu)) /** @todo testcase: rev 3.15 of the amd manuals claims it only loads a 32-bit greg. */
                IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pIemCpu->iEffSeg, GCPtrEff);
            else
                IEM_MC_FETCH_MEM_U64(offSeg, pIemCpu->iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pIemCpu->iEffSeg, GCPtrEff, 8); /* Selector follows the 64-bit offset. */
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
5684
5685
/** Opcode 0x0f 0xb2 - LSS Gv,Mp.  Register operand form is \#UD. */
FNIEMOP_DEF(iemOp_lss_Gv_Mp)
{
    IEMOP_MNEMONIC("lss Gv,Mp");
    IEMOP_HLP_MIN_386(); /* LSS first appeared on the 80386. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE(); /* Far pointers only exist in memory. */
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
}
5696
5697
5698/** Opcode 0x0f 0xb3. */
5699FNIEMOP_DEF(iemOp_btr_Ev_Gv)
5700{
5701 IEMOP_MNEMONIC("btr Ev,Gv");
5702 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
5703}
5704
5705
/** Opcode 0x0f 0xb4 - LFS Gv,Mp.  Register operand form is \#UD. */
FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
{
    IEMOP_MNEMONIC("lfs Gv,Mp");
    IEMOP_HLP_MIN_386(); /* LFS first appeared on the 80386. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE(); /* Far pointers only exist in memory. */
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
}
5716
5717
/** Opcode 0x0f 0xb5 - LGS Gv,Mp.  Register operand form is \#UD. */
FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
{
    IEMOP_MNEMONIC("lgs Gv,Mp");
    IEMOP_HLP_MIN_386(); /* LGS first appeared on the 80386. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE(); /* Far pointers only exist in memory. */
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
}
5728
5729
/**
 * Opcode 0x0f 0xb6 - MOVZX Gv,Eb.
 *
 * Zero-extends a byte source (register or memory) into a 16/32/64-bit
 * destination register, one decode path per effective operand size.
 */
FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
{
    IEMOP_MNEMONIC("movzx Gv,Eb");
    IEMOP_HLP_MIN_386(); /* MOVZX first appeared on the 80386. */

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
5820
5821
/**
 * Opcode 0x0f 0xb7 - MOVZX Gv,Ew.
 *
 * Zero-extends a word source (register or memory) into a 32- or 64-bit
 * destination register.  Only two paths: 16- and 32-bit effective operand
 * sizes are treated alike (both extend to 32 bits).
 */
FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
{
    IEMOP_MNEMONIC("movzx Gv,Ew");
    IEMOP_HLP_MIN_386(); /* MOVZX first appeared on the 80386. */

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /** @todo Not entirely sure how the operand size prefix is handled here,
     *        assuming that it will be ignored. Would be nice to have a few
     *        test for this. */
    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}
5888
5889
5890/** Opcode 0x0f 0xb8. */
5891FNIEMOP_STUB(iemOp_popcnt_Gv_Ev_jmpe);
5892
5893
/** Opcode 0x0f 0xb9 - group 10 (UD1); always raises \#UD, logged for diagnostics. */
FNIEMOP_DEF(iemOp_Grp10)
{
    Log(("iemOp_Grp10 -> #UD\n"));
    return IEMOP_RAISE_INVALID_OPCODE();
}
5900
5901
5902/** Opcode 0x0f 0xba. */
5903FNIEMOP_DEF(iemOp_Grp8)
5904{
5905 IEMOP_HLP_MIN_386();
5906 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5907 PCIEMOPBINSIZES pImpl;
5908 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
5909 {
5910 case 0: case 1: case 2: case 3:
5911 return IEMOP_RAISE_INVALID_OPCODE();
5912 case 4: pImpl = &g_iemAImpl_bt; IEMOP_MNEMONIC("bt Ev,Ib"); break;
5913 case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC("bts Ev,Ib"); break;
5914 case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC("btr Ev,Ib"); break;
5915 case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC("btc Ev,Ib"); break;
5916 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5917 }
5918 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
5919
5920 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5921 {
5922 /* register destination. */
5923 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
5924 IEMOP_HLP_NO_LOCK_PREFIX();
5925
5926 switch (pIemCpu->enmEffOpSize)
5927 {
5928 case IEMMODE_16BIT:
5929 IEM_MC_BEGIN(3, 0);
5930 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5931 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u8Bit & 0x0f, 1);
5932 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5933
5934 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
5935 IEM_MC_REF_EFLAGS(pEFlags);
5936 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
5937
5938 IEM_MC_ADVANCE_RIP();
5939 IEM_MC_END();
5940 return VINF_SUCCESS;
5941
5942 case IEMMODE_32BIT:
5943 IEM_MC_BEGIN(3, 0);
5944 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5945 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u8Bit & 0x1f, 1);
5946 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5947
5948 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
5949 IEM_MC_REF_EFLAGS(pEFlags);
5950 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
5951
5952 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5953 IEM_MC_ADVANCE_RIP();
5954 IEM_MC_END();
5955 return VINF_SUCCESS;
5956
5957 case IEMMODE_64BIT:
5958 IEM_MC_BEGIN(3, 0);
5959 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5960 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u8Bit & 0x3f, 1);
5961 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5962
5963 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
5964 IEM_MC_REF_EFLAGS(pEFlags);
5965 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
5966
5967 IEM_MC_ADVANCE_RIP();
5968 IEM_MC_END();
5969 return VINF_SUCCESS;
5970
5971 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5972 }
5973 }
5974 else
5975 {
5976 /* memory destination. */
5977
5978 uint32_t fAccess;
5979 if (pImpl->pfnLockedU16)
5980 fAccess = IEM_ACCESS_DATA_RW;
5981 else /* BT */
5982 {
5983 IEMOP_HLP_NO_LOCK_PREFIX();
5984 fAccess = IEM_ACCESS_DATA_R;
5985 }
5986
5987 /** @todo test negative bit offsets! */
5988 switch (pIemCpu->enmEffOpSize)
5989 {
5990 case IEMMODE_16BIT:
5991 IEM_MC_BEGIN(3, 1);
5992 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5993 IEM_MC_ARG(uint16_t, u16Src, 1);
5994 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
5995 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5996
5997 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5998 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
5999 IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
6000 IEM_MC_FETCH_EFLAGS(EFlags);
6001 IEM_MC_MEM_MAP(pu16Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0);
6002 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
6003 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6004 else
6005 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
6006 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
6007
6008 IEM_MC_COMMIT_EFLAGS(EFlags);
6009 IEM_MC_ADVANCE_RIP();
6010 IEM_MC_END();
6011 return VINF_SUCCESS;
6012
6013 case IEMMODE_32BIT:
6014 IEM_MC_BEGIN(3, 1);
6015 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6016 IEM_MC_ARG(uint32_t, u32Src, 1);
6017 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6018 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6019
6020 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6021 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6022 IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
6023 IEM_MC_FETCH_EFLAGS(EFlags);
6024 IEM_MC_MEM_MAP(pu32Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0);
6025 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
6026 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6027 else
6028 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
6029 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
6030
6031 IEM_MC_COMMIT_EFLAGS(EFlags);
6032 IEM_MC_ADVANCE_RIP();
6033 IEM_MC_END();
6034 return VINF_SUCCESS;
6035
6036 case IEMMODE_64BIT:
6037 IEM_MC_BEGIN(3, 1);
6038 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6039 IEM_MC_ARG(uint64_t, u64Src, 1);
6040 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6041 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6042
6043 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6044 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6045 IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
6046 IEM_MC_FETCH_EFLAGS(EFlags);
6047 IEM_MC_MEM_MAP(pu64Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0);
6048 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
6049 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6050 else
6051 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
6052 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
6053
6054 IEM_MC_COMMIT_EFLAGS(EFlags);
6055 IEM_MC_ADVANCE_RIP();
6056 IEM_MC_END();
6057 return VINF_SUCCESS;
6058
6059 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6060 }
6061 }
6062
6063}
6064
6065
/** Opcode 0x0f 0xbb. */
FNIEMOP_DEF(iemOp_btc_Ev_Gv)
{
    IEMOP_MNEMONIC("btc Ev,Gv");
    IEMOP_HLP_MIN_386(); /* BTC requires a 386 or later. */
    /* All register/memory decoding and LOCK handling is done by the common
       bit-test worker; we only supply the BTC implementation table. */
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
}
6073
6074
/** Opcode 0x0f 0xbc. */
FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
{
    IEMOP_MNEMONIC("bsf Gv,Ev");
    IEMOP_HLP_MIN_386();
    /* BSF leaves OF/SF/AF/PF/CF undefined (only ZF is defined); tell the
       verification mode to ignore mismatches in those flags. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsf);
}
6083
6084
/** Opcode 0x0f 0xbd. */
FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
{
    IEMOP_MNEMONIC("bsr Gv,Ev");
    IEMOP_HLP_MIN_386();
    /* Same flag story as BSF: everything but ZF is undefined. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsr);
}
6093
6094
/** Opcode 0x0f 0xbe. */
FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
{
    IEMOP_MNEMONIC("movsx Gv,Eb");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register source: sign-extend the byte register into the
           destination GREG at the effective operand size. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        /* Memory source: same sign-extension, fetching the byte from the
           effective address instead of a register. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6185
6186
/** Opcode 0x0f 0xbf. */
FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
{
    IEMOP_MNEMONIC("movsx Gv,Ew");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /** @todo Not entirely sure how the operand size prefix is handled here,
     * assuming that it will be ignored. Would be nice to have a few
     * test for this. */
    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register source. Only two destination widths matter: anything
           other than 64-bit is treated as a 32-bit destination here. */
        if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}
6253
6254
/** Opcode 0x0f 0xc0. */
FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_MIN_486(); /* XADD appeared with the 486. */
    IEMOP_MNEMONIC("xadd Eb,Gb");

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* reg,reg form: LOCK is invalid here. */
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0);
        IEM_MC_ARG(uint8_t *,  pu8Reg,  1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
        /* The worker exchanges thru a local copy of the source register so
           the original register value can be written back afterwards. */
        IEM_MC_BEGIN(3, 3);
        IEM_MC_ARG(uint8_t *,  pu8Dst,          0);
        IEM_MC_ARG(uint8_t *,  pu8Reg,          1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(uint8_t,  u8RegCopy);
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U8(u8RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
        IEM_MC_FETCH_EFLAGS(EFlags);
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u8RegCopy);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }
    return VINF_SUCCESS;
}
6313
6314
/** Opcode 0x0f 0xc1. */
FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
{
    IEMOP_MNEMONIC("xadd Ev,Gv");
    IEMOP_HLP_MIN_486();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* reg,reg form: LOCK is invalid here. */
        IEMOP_HLP_NO_LOCK_PREFIX();

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst,  0);
                IEM_MC_ARG(uint16_t *, pu16Reg,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst,  0);
                IEM_MC_ARG(uint32_t *, pu32Reg,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);

                /* 32-bit writes zero bits 63:32 of both GREGs in long mode. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst,  0);
                IEM_MC_ARG(uint64_t *, pu64Reg,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
        /* As with the byte form: operate on a local copy of the source
           register, then store the exchanged value back into the GREG. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint16_t *, pu16Dst,          0);
                IEM_MC_ARG(uint16_t *, pu16Reg,          1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16RegCopy);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U16(u16RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint32_t *, pu32Dst,          0);
                IEM_MC_ARG(uint32_t *, pu32Reg,          1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32RegCopy);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U32(u32RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint64_t *, pu64Dst,          0);
                IEM_MC_ARG(uint64_t *, pu64Reg,          1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64RegCopy);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U64(u64RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6467
/** Opcode 0x0f 0xc2. */
/* CMPPS/CMPPD/CMPSS/CMPSD - not implemented; FNIEMOP_STUB presumably emits a
   placeholder handler (see macro definition earlier in the file). */
FNIEMOP_STUB(iemOp_cmpps_Vps_Wps_Ib__cmppd_Vpd_Wpd_Ib__cmpss_Vss_Wss_Ib__cmpsd_Vsd_Wsd_Ib);
6470
6471
/** Opcode 0x0f 0xc3. */
#if 0 //ndef VBOX_WITH_REM
/* Disabled implementation of MOVNTI (non-temporal store of a GPR).
   NOTE(review): the mnemonic string below says "mov Ev,Gv" instead of
   "movnti" - fix if/when this code is enabled. */
FNIEMOP_DEF(iemOp_movnti_My_Gy)
{
    IEMOP_MNEMONIC("mov Ev,Gv");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /* Only the register -> memory form makes sense, assuming #UD for the other form. */
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                /* MOVNTI requires SSE2; raise #UD otherwise. */
                if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fSse2)
                    return IEMOP_RAISE_INVALID_OPCODE();

                IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrEffDst, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fSse2)
                    return IEMOP_RAISE_INVALID_OPCODE();

                IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffDst, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_16BIT:
                /** @todo check this form.   */
                return IEMOP_RAISE_INVALID_OPCODE();
        }
    }
    else
        return IEMOP_RAISE_INVALID_OPCODE();
    return VINF_SUCCESS;
}
#else
FNIEMOP_STUB(iemOp_movnti_My_Gy); // solaris 10 uses this in hat_pte_zero().
#endif
6529
6530
/* Opcodes 0x0f 0xc4..0xc6 (PINSRW/PEXTRW/SHUFPS-SHUFPD) - not implemented;
   FNIEMOP_STUB presumably emits placeholder handlers. */
/** Opcode 0x0f 0xc4. */
FNIEMOP_STUB(iemOp_pinsrw_Pq_Ry_Mw_Ib__pinsrw_Vdq_Ry_Mw_Ib);

/** Opcode 0x0f 0xc5. */
FNIEMOP_STUB(iemOp_pextrw_Gd_Nq_Ib__pextrw_Gd_Udq_Ib);

/** Opcode 0x0f 0xc6. */
FNIEMOP_STUB(iemOp_shufps_Vps_Wps_Ib__shufdp_Vpd_Wpd_Ib);
6539
6540
/** Opcode 0x0f 0xc7 !11/1. */
FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
{
    IEMOP_MNEMONIC("cmpxchg8b Mq");

    IEM_MC_BEGIN(4, 3);
    IEM_MC_ARG(uint64_t *, pu64MemDst,     0);
    IEM_MC_ARG(PRTUINT64U, pu64EaxEdx,     1);
    IEM_MC_ARG(PRTUINT64U, pu64EbxEcx,     2);
    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
    IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
    IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);

    /* Comparand: EDX:EAX assembled into a 64-bit pair. */
    IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
    IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
    IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);

    /* Replacement value: ECX:EBX. */
    IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
    IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
    IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);

    IEM_MC_FETCH_EFLAGS(EFlags);
    if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
    else
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);

    IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
    IEM_MC_COMMIT_EFLAGS(EFlags);
    /* On mismatch (ZF clear) the worker has left the memory value in the
       EaxEdx pair; write it back to EDX:EAX. */
    IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
        /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */
        IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
        IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
6585
6586
/* Group 9 sub-opcodes not implemented yet; FNIEMOP_UD_STUB_1 presumably
   generates a handler that raises #UD. */
/** Opcode REX.W 0x0f 0xc7 !11/1. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm);

/** Opcode 0x0f 0xc7 11/6. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm);

/** Opcode 0x0f 0xc7 !11/6. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);

/** Opcode 0x66 0x0f 0xc7 !11/6. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xc7 !11/6. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);

/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
6604
6605
6606/** Opcode 0x0f 0xc7. */
6607FNIEMOP_DEF(iemOp_Grp9)
6608{
6609 /** @todo Testcase: Check mixing 0x66 and 0xf3. Check the effect of 0xf2. */
6610 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6611 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
6612 {
6613 case 0: case 2: case 3: case 4: case 5:
6614 return IEMOP_RAISE_INVALID_OPCODE();
6615 case 1:
6616 /** @todo Testcase: Check prefix effects on cmpxchg8b/16b. */
6617 if ( (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)
6618 || (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))) /** @todo Testcase: AMD seems to express a different idea here wrt prefixes. */
6619 return IEMOP_RAISE_INVALID_OPCODE();
6620 if (bRm & IEM_OP_PRF_SIZE_REX_W)
6621 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
6622 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
6623 case 6:
6624 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6625 return FNIEMOP_CALL_1(iemOp_Grp9_rdrand_Rv, bRm);
6626 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))
6627 {
6628 case 0:
6629 return FNIEMOP_CALL_1(iemOp_Grp9_vmptrld_Mq, bRm);
6630 case IEM_OP_PRF_SIZE_OP:
6631 return FNIEMOP_CALL_1(iemOp_Grp9_vmclear_Mq, bRm);
6632 case IEM_OP_PRF_REPZ:
6633 return FNIEMOP_CALL_1(iemOp_Grp9_vmxon_Mq, bRm);
6634 default:
6635 return IEMOP_RAISE_INVALID_OPCODE();
6636 }
6637 case 7:
6638 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))
6639 {
6640 case 0:
6641 case IEM_OP_PRF_REPZ:
6642 return FNIEMOP_CALL_1(iemOp_Grp9_vmptrst_Mq, bRm);
6643 default:
6644 return IEMOP_RAISE_INVALID_OPCODE();
6645 }
6646 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6647 }
6648}
6649
6650
/**
 * Common 'bswap register' helper.
 *
 * Byte-swaps the general register @a iReg at the current effective operand
 * size and advances RIP.  LOCK is always invalid for BSWAP.
 */
FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            /* 16-bit BSWAP result is undefined on real CPUs; this
               implementation hands a 32-bit reference to the 16-bit worker
               without touching the upper dword. */
            IEM_MC_BEGIN(1, 0);
            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);     /* Don't clear the high dword! */
            IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(1, 0);
            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            /* 32-bit writes zero bits 63:32 in long mode. */
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
            IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(1, 0);
            IEM_MC_ARG(uint64_t *, pu64Dst, 0);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6690
6691
/** Opcode 0x0f 0xc8. */
FNIEMOP_DEF(iemOp_bswap_rAX_r8)
{
    IEMOP_MNEMONIC("bswap rAX/r8");
    /* Note! Intel manuals states that R8-R15 can be accessed by using a REX.X
             prefix.  REX.B is the correct prefix it appears.  For a parallel
             case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
    IEMOP_HLP_MIN_486(); /* BSWAP appeared with the 486. */
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pIemCpu->uRexB);
}
6702
6703
/** Opcode 0x0f 0xc9. */
FNIEMOP_DEF(iemOp_bswap_rCX_r9)
{
    IEMOP_MNEMONIC("bswap rCX/r9");
    IEMOP_HLP_MIN_486();
    /* REX.B selects r9 instead of rCX; see the note on iemOp_bswap_rAX_r8. */
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pIemCpu->uRexB);
}
6711
6712
6713/** Opcode 0x0f 0xca. */
6714FNIEMOP_DEF(iemOp_bswap_rDX_r10)
6715{
6716 IEMOP_MNEMONIC("bswap rDX/r9");
6717 IEMOP_HLP_MIN_486();
6718 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pIemCpu->uRexB);
6719}
6720
6721
6722/** Opcode 0x0f 0xcb. */
6723FNIEMOP_DEF(iemOp_bswap_rBX_r11)
6724{
6725 IEMOP_MNEMONIC("bswap rBX/r9");
6726 IEMOP_HLP_MIN_486();
6727 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pIemCpu->uRexB);
6728}
6729
6730
/** Opcode 0x0f 0xcc. */
FNIEMOP_DEF(iemOp_bswap_rSP_r12)
{
    IEMOP_MNEMONIC("bswap rSP/r12");
    IEMOP_HLP_MIN_486();
    /* REX.B selects r12 instead of rSP. */
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pIemCpu->uRexB);
}
6738
6739
/** Opcode 0x0f 0xcd. */
FNIEMOP_DEF(iemOp_bswap_rBP_r13)
{
    IEMOP_MNEMONIC("bswap rBP/r13");
    IEMOP_HLP_MIN_486();
    /* REX.B selects r13 instead of rBP. */
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pIemCpu->uRexB);
}
6747
6748
/** Opcode 0x0f 0xce. */
FNIEMOP_DEF(iemOp_bswap_rSI_r14)
{
    IEMOP_MNEMONIC("bswap rSI/r14");
    IEMOP_HLP_MIN_486();
    /* REX.B selects r14 instead of rSI. */
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pIemCpu->uRexB);
}
6756
6757
/** Opcode 0x0f 0xcf. */
FNIEMOP_DEF(iemOp_bswap_rDI_r15)
{
    IEMOP_MNEMONIC("bswap rDI/r15");
    IEMOP_HLP_MIN_486();
    /* REX.B selects r15 instead of rDI. */
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pIemCpu->uRexB);
}
6765
6766
6767
/* Opcodes 0x0f 0xd0..0xd6 (MMX/SSE packed shifts, adds, multiplies and
   move forms) - not implemented; FNIEMOP_STUB presumably emits placeholder
   handlers. */
/** Opcode 0x0f 0xd0. */
FNIEMOP_STUB(iemOp_addsubpd_Vpd_Wpd__addsubps_Vps_Wps);
/** Opcode 0x0f 0xd1. */
FNIEMOP_STUB(iemOp_psrlw_Pp_Qp__psrlw_Vdp_Wdq);
/** Opcode 0x0f 0xd2. */
FNIEMOP_STUB(iemOp_psrld_Pq_Qq__psrld_Vdq_Wdq);
/** Opcode 0x0f 0xd3. */
FNIEMOP_STUB(iemOp_psrlq_Pq_Qq__psrlq_Vdq_Wdq);
/** Opcode 0x0f 0xd4. */
FNIEMOP_STUB(iemOp_paddq_Pq_Qq__paddq_Vdq_Wdq);
/** Opcode 0x0f 0xd5. */
FNIEMOP_STUB(iemOp_pmulq_Pq_Qq__pmullw_Vdq_Wdq);
/** Opcode 0x0f 0xd6. */
FNIEMOP_STUB(iemOp_movq_Wq_Vq__movq2dq_Vdq_Nq__movdq2q_Pq_Uq);
6782
6783
6784/** Opcode 0x0f 0xd7. */
6785FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq__pmovmskb_Gd_Udq)
6786{
6787 /* Docs says register only. */
6788 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6789 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
6790 return IEMOP_RAISE_INVALID_OPCODE();
6791
6792 /* Note! Taking the lazy approch here wrt the high 32-bits of the GREG. */
6793 /** @todo testcase: Check that the instruction implicitly clears the high
6794 * bits in 64-bit mode. The REX.W is first necessary when VLMAX > 256
6795 * and opcode modifications are made to work with the whole width (not
6796 * just 128). */
6797 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
6798 {
6799 case IEM_OP_PRF_SIZE_OP: /* SSE */
6800 IEMOP_MNEMONIC("pmovmskb Gd,Nq");
6801 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
6802 IEM_MC_BEGIN(2, 0);
6803 IEM_MC_ARG(uint64_t *, pDst, 0);
6804 IEM_MC_ARG(uint128_t const *, pSrc, 1);
6805 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6806 IEM_MC_PREPARE_SSE_USAGE();
6807 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
6808 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
6809 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
6810 IEM_MC_ADVANCE_RIP();
6811 IEM_MC_END();
6812 return VINF_SUCCESS;
6813
6814 case 0: /* MMX */
6815 IEMOP_MNEMONIC("pmovmskb Gd,Udq");
6816 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
6817 IEM_MC_BEGIN(2, 0);
6818 IEM_MC_ARG(uint64_t *, pDst, 0);
6819 IEM_MC_ARG(uint64_t const *, pSrc, 1);
6820 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
6821 IEM_MC_PREPARE_FPU_USAGE();
6822 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
6823 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
6824 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
6825 IEM_MC_ADVANCE_RIP();
6826 IEM_MC_END();
6827 return VINF_SUCCESS;
6828
6829 default:
6830 return IEMOP_RAISE_INVALID_OPCODE();
6831 }
6832}
6833
6834
/* Opcodes 0x0f 0xd8..0xe6 (MMX/SSE saturating arithmetic, min/max, logic,
   average, shifts and conversions) - not implemented; FNIEMOP_STUB presumably
   emits placeholder handlers. */
/** Opcode 0x0f 0xd8. */
FNIEMOP_STUB(iemOp_psubusb_Pq_Qq__psubusb_Vdq_Wdq);
/** Opcode 0x0f 0xd9. */
FNIEMOP_STUB(iemOp_psubusw_Pq_Qq__psubusw_Vdq_Wdq);
/** Opcode 0x0f 0xda. */
FNIEMOP_STUB(iemOp_pminub_Pq_Qq__pminub_Vdq_Wdq);
/** Opcode 0x0f 0xdb. */
FNIEMOP_STUB(iemOp_pand_Pq_Qq__pand_Vdq_Wdq);
/** Opcode 0x0f 0xdc. */
FNIEMOP_STUB(iemOp_paddusb_Pq_Qq__paddusb_Vdq_Wdq);
/** Opcode 0x0f 0xdd. */
FNIEMOP_STUB(iemOp_paddusw_Pq_Qq__paddusw_Vdq_Wdq);
/** Opcode 0x0f 0xde. */
FNIEMOP_STUB(iemOp_pmaxub_Pq_Qq__pamxub_Vdq_Wdq);
/** Opcode 0x0f 0xdf. */
FNIEMOP_STUB(iemOp_pandn_Pq_Qq__pandn_Vdq_Wdq);
/** Opcode 0x0f 0xe0. */
FNIEMOP_STUB(iemOp_pavgb_Pq_Qq__pavgb_Vdq_Wdq);
/** Opcode 0x0f 0xe1. */
FNIEMOP_STUB(iemOp_psraw_Pq_Qq__psraw_Vdq_Wdq);
/** Opcode 0x0f 0xe2. */
FNIEMOP_STUB(iemOp_psrad_Pq_Qq__psrad_Vdq_Wdq);
/** Opcode 0x0f 0xe3. */
FNIEMOP_STUB(iemOp_pavgw_Pq_Qq__pavgw_Vdq_Wdq);
/** Opcode 0x0f 0xe4. */
FNIEMOP_STUB(iemOp_pmulhuw_Pq_Qq__pmulhuw_Vdq_Wdq);
/** Opcode 0x0f 0xe5. */
FNIEMOP_STUB(iemOp_pmulhw_Pq_Qq__pmulhw_Vdq_Wdq);
/** Opcode 0x0f 0xe6. */
FNIEMOP_STUB(iemOp_cvttpd2dq_Vdq_Wdp__cvtdq2pd_Vdq_Wpd__cvtpd2dq_Vdq_Wpd);
6865
6866
/** Opcode 0x0f 0xe7. */
FNIEMOP_DEF(iemOp_movntq_Mq_Pq__movntdq_Mdq_Vdq)
{
    /* MOVNTQ (MMX, no prefix) or MOVNTDQ (SSE, 0x66 prefix): non-temporal
       store of a MMX/XMM register to memory.  Register forms are invalid. */
    IEMOP_MNEMONIC(!(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP) ? "movntq mr,r" : "movntdq mr,r");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, memory.
         */
/** @todo check when the REPNZ/Z bits kick in. Same as lock, probably... */
        switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
        {

            case IEM_OP_PRF_SIZE_OP: /* SSE */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint128_t,                 uSrc);
                IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

                IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                /* MOVNTDQ requires a 16-byte aligned destination. */
                IEM_MC_STORE_MEM_U128_ALIGN_SSE(pIemCpu->iEffSeg, GCPtrEffSrc, uSrc);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case 0: /* MMX */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t,                  uSrc);
                IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();

                IEM_MC_FETCH_MREG_U64(uSrc, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffSrc, uSrc);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            default:
                return IEMOP_RAISE_INVALID_OPCODE();
        }
    }
    /* The register, register encoding is invalid. */
    else
        return IEMOP_RAISE_INVALID_OPCODE();
    return VINF_SUCCESS;
}
6924
6925
/** Opcode 0x0f 0xe8 - psubsb. Stubbed, not yet implemented. */
FNIEMOP_STUB(iemOp_psubsb_Pq_Qq__psubsb_Vdq_Wdq);
/** Opcode 0x0f 0xe9 - psubsw. Stubbed, not yet implemented. */
FNIEMOP_STUB(iemOp_psubsw_Pq_Qq__psubsw_Vdq_Wdq);
/** Opcode 0x0f 0xea - pminsw. Stubbed, not yet implemented. */
FNIEMOP_STUB(iemOp_pminsw_Pq_Qq__pminsw_Vdq_Wdq);
/** Opcode 0x0f 0xeb - por. Stubbed, not yet implemented. */
FNIEMOP_STUB(iemOp_por_Pq_Qq__por_Vdq_Wdq);
/** Opcode 0x0f 0xec - paddsb. Stubbed, not yet implemented. */
FNIEMOP_STUB(iemOp_paddsb_Pq_Qq__paddsb_Vdq_Wdq);
/** Opcode 0x0f 0xed - paddsw. Stubbed, not yet implemented. */
FNIEMOP_STUB(iemOp_paddsw_Pq_Qq__paddsw_Vdq_Wdq);
/** Opcode 0x0f 0xee - pmaxsw. Stubbed, not yet implemented. */
FNIEMOP_STUB(iemOp_pmaxsw_Pq_Qq__pmaxsw_Vdq_Wdq);
6940
6941
/**
 * Opcode 0x0f 0xef - PXOR Pq,Qq (MMX) / PXOR Vdq,Wdq (SSE2).
 *
 * Bitwise XOR; defers to the common MMX/SSE2 full-register worker with the
 * pxor implementation table.
 */
FNIEMOP_DEF(iemOp_pxor_Pq_Qq__pxor_Vdq_Wdq)
{
    IEMOP_MNEMONIC("pxor");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pxor);
}
6948
6949
/** Opcode 0x0f 0xf0 - lddqu. Stubbed, not yet implemented. */
FNIEMOP_STUB(iemOp_lddqu_Vdq_Mdq);
/** Opcode 0x0f 0xf1 - psllw. Stubbed, not yet implemented. */
FNIEMOP_STUB(iemOp_psllw_Pq_Qq__pslw_Vdq_Wdq);
/** Opcode 0x0f 0xf2 - pslld. Stubbed, not yet implemented. */
FNIEMOP_STUB(iemOp_psld_Pq_Qq__pslld_Vdq_Wdq);
/** Opcode 0x0f 0xf3 - psllq. Stubbed, not yet implemented. */
FNIEMOP_STUB(iemOp_psllq_Pq_Qq__pslq_Vdq_Wdq);
/** Opcode 0x0f 0xf4 - pmuludq. Stubbed, not yet implemented. */
FNIEMOP_STUB(iemOp_pmuludq_Pq_Qq__pmuludq_Vdq_Wdq);
/** Opcode 0x0f 0xf5 - pmaddwd. Stubbed, not yet implemented. */
FNIEMOP_STUB(iemOp_pmaddwd_Pq_Qq__pmaddwd_Vdq_Wdq);
/** Opcode 0x0f 0xf6 - psadbw. Stubbed, not yet implemented. */
FNIEMOP_STUB(iemOp_psadbw_Pq_Qq__psadbw_Vdq_Wdq);
/** Opcode 0x0f 0xf7 - maskmovq/maskmovdqu. Stubbed, not yet implemented. */
FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq__maskmovdqu_Vdq_Udq);
/** Opcode 0x0f 0xf8 - psubb. Stubbed, not yet implemented. */
FNIEMOP_STUB(iemOp_psubb_Pq_Qq_psubb_Vdq_Wdq); //NEXT
/** Opcode 0x0f 0xf9 - psubw. Stubbed, not yet implemented. */
FNIEMOP_STUB(iemOp_psubw_Pq_Qq__psubw_Vdq_Wdq);
/** Opcode 0x0f 0xfa - psubd. Stubbed, not yet implemented. */
FNIEMOP_STUB(iemOp_psubd_Pq_Qq__psubd_Vdq_Wdq);
/** Opcode 0x0f 0xfb - psubq. Stubbed, not yet implemented. */
FNIEMOP_STUB(iemOp_psubq_Pq_Qq__psbuq_Vdq_Wdq);
/** Opcode 0x0f 0xfc - paddb. Stubbed, not yet implemented. */
FNIEMOP_STUB(iemOp_paddb_Pq_Qq__paddb_Vdq_Wdq);
/** Opcode 0x0f 0xfd - paddw. Stubbed, not yet implemented. */
FNIEMOP_STUB(iemOp_paddw_Pq_Qq__paddw_Vdq_Wdq);
/** Opcode 0x0f 0xfe - paddd. Stubbed, not yet implemented. */
FNIEMOP_STUB(iemOp_paddd_Pq_Qq__paddd_Vdq_Wdq);
6980
6981
/**
 * Dispatch table for the two-byte (0x0f escaped) opcodes, indexed by the
 * second opcode byte (see iemOp_2byteEscape).  Undefined encodings point at
 * iemOp_Invalid.
 */
const PFNIEMOP g_apfnTwoByteMap[256] =
{
    /* 0x00 */ iemOp_Grp6,
    /* 0x01 */ iemOp_Grp7,
    /* 0x02 */ iemOp_lar_Gv_Ew,
    /* 0x03 */ iemOp_lsl_Gv_Ew,
    /* 0x04 */ iemOp_Invalid,
    /* 0x05 */ iemOp_syscall,
    /* 0x06 */ iemOp_clts,
    /* 0x07 */ iemOp_sysret,
    /* 0x08 */ iemOp_invd,
    /* 0x09 */ iemOp_wbinvd,
    /* 0x0a */ iemOp_Invalid,
    /* 0x0b */ iemOp_ud2,
    /* 0x0c */ iemOp_Invalid,
    /* 0x0d */ iemOp_nop_Ev_GrpP,
    /* 0x0e */ iemOp_femms,
    /* 0x0f */ iemOp_3Dnow,
    /* 0x10 */ iemOp_movups_Vps_Wps__movupd_Vpd_Wpd__movss_Vss_Wss__movsd_Vsd_Wsd,
    /* 0x11 */ iemOp_movups_Wps_Vps__movupd_Wpd_Vpd__movss_Wss_Vss__movsd_Vsd_Wsd,
    /* 0x12 */ iemOp_movlps_Vq_Mq__movhlps_Vq_Uq__movlpd_Vq_Mq__movsldup_Vq_Wq__movddup_Vq_Wq,
    /* 0x13 */ iemOp_movlps_Mq_Vq__movlpd_Mq_Vq,
    /* 0x14 */ iemOp_unpckhlps_Vps_Wq__unpcklpd_Vpd_Wq,
    /* 0x15 */ iemOp_unpckhps_Vps_Wq__unpckhpd_Vpd_Wq,
    /* 0x16 */ iemOp_movhps_Vq_Mq__movlhps_Vq_Uq__movhpd_Vq_Mq__movshdup_Vq_Wq,
    /* 0x17 */ iemOp_movhps_Mq_Vq__movhpd_Mq_Vq,
    /* 0x18 */ iemOp_prefetch_Grp16,
    /* 0x19 */ iemOp_nop_Ev,
    /* 0x1a */ iemOp_nop_Ev,
    /* 0x1b */ iemOp_nop_Ev,
    /* 0x1c */ iemOp_nop_Ev,
    /* 0x1d */ iemOp_nop_Ev,
    /* 0x1e */ iemOp_nop_Ev,
    /* 0x1f */ iemOp_nop_Ev,
    /* 0x20 */ iemOp_mov_Rd_Cd,
    /* 0x21 */ iemOp_mov_Rd_Dd,
    /* 0x22 */ iemOp_mov_Cd_Rd,
    /* 0x23 */ iemOp_mov_Dd_Rd,
    /* 0x24 */ iemOp_mov_Rd_Td,
    /* 0x25 */ iemOp_Invalid,
    /* 0x26 */ iemOp_mov_Td_Rd,
    /* 0x27 */ iemOp_Invalid,
    /* 0x28 */ iemOp_movaps_Vps_Wps__movapd_Vpd_Wpd,
    /* 0x29 */ iemOp_movaps_Wps_Vps__movapd_Wpd_Vpd,
    /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi__cvtpi2pd_Vpd_Qpi__cvtsi2ss_Vss_Ey__cvtsi2sd_Vsd_Ey,
    /* 0x2b */ iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd,
    /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps__cvttpd2pi_Ppi_Wpd__cvttss2si_Gy_Wss__cvttsd2si_Yu_Wsd,
    /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps__cvtpd2pi_QpiWpd__cvtss2si_Gy_Wss__cvtsd2si_Gy_Wsd,
    /* 0x2e */ iemOp_ucomiss_Vss_Wss__ucomisd_Vsd_Wsd,
    /* 0x2f */ iemOp_comiss_Vss_Wss__comisd_Vsd_Wsd,
    /* 0x30 */ iemOp_wrmsr,
    /* 0x31 */ iemOp_rdtsc,
    /* 0x32 */ iemOp_rdmsr,
    /* 0x33 */ iemOp_rdpmc,
    /* 0x34 */ iemOp_sysenter,
    /* 0x35 */ iemOp_sysexit,
    /* 0x36 */ iemOp_Invalid,
    /* 0x37 */ iemOp_getsec,
    /* 0x38 */ iemOp_3byte_Esc_A4,
    /* 0x39 */ iemOp_Invalid,
    /* 0x3a */ iemOp_3byte_Esc_A5,
    /* 0x3b */ iemOp_Invalid,
    /* 0x3c */ iemOp_Invalid,
    /* 0x3d */ iemOp_Invalid,
    /* 0x3e */ iemOp_Invalid,
    /* 0x3f */ iemOp_Invalid,
    /* 0x40 */ iemOp_cmovo_Gv_Ev,
    /* 0x41 */ iemOp_cmovno_Gv_Ev,
    /* 0x42 */ iemOp_cmovc_Gv_Ev,
    /* 0x43 */ iemOp_cmovnc_Gv_Ev,
    /* 0x44 */ iemOp_cmove_Gv_Ev,
    /* 0x45 */ iemOp_cmovne_Gv_Ev,
    /* 0x46 */ iemOp_cmovbe_Gv_Ev,
    /* 0x47 */ iemOp_cmovnbe_Gv_Ev,
    /* 0x48 */ iemOp_cmovs_Gv_Ev,
    /* 0x49 */ iemOp_cmovns_Gv_Ev,
    /* 0x4a */ iemOp_cmovp_Gv_Ev,
    /* 0x4b */ iemOp_cmovnp_Gv_Ev,
    /* 0x4c */ iemOp_cmovl_Gv_Ev,
    /* 0x4d */ iemOp_cmovnl_Gv_Ev,
    /* 0x4e */ iemOp_cmovle_Gv_Ev,
    /* 0x4f */ iemOp_cmovnle_Gv_Ev,
    /* 0x50 */ iemOp_movmskps_Gy_Ups__movmskpd_Gy_Upd,
    /* 0x51 */ iemOp_sqrtps_Wps_Vps__sqrtpd_Wpd_Vpd__sqrtss_Vss_Wss__sqrtsd_Vsd_Wsd,
    /* 0x52 */ iemOp_rsqrtps_Wps_Vps__rsqrtss_Vss_Wss,
    /* 0x53 */ iemOp_rcpps_Wps_Vps__rcpss_Vs_Wss,
    /* 0x54 */ iemOp_andps_Vps_Wps__andpd_Wpd_Vpd,
    /* 0x55 */ iemOp_andnps_Vps_Wps__andnpd_Wpd_Vpd,
    /* 0x56 */ iemOp_orps_Wpd_Vpd__orpd_Wpd_Vpd,
    /* 0x57 */ iemOp_xorps_Vps_Wps__xorpd_Wpd_Vpd,
    /* 0x58 */ iemOp_addps_Vps_Wps__addpd_Vpd_Wpd__addss_Vss_Wss__addsd_Vsd_Wsd,
    /* 0x59 */ iemOp_mulps_Vps_Wps__mulpd_Vpd_Wpd__mulss_Vss__Wss__mulsd_Vsd_Wsd,
    /* 0x5a */ iemOp_cvtps2pd_Vpd_Wps__cvtpd2ps_Vps_Wpd__cvtss2sd_Vsd_Wss__cvtsd2ss_Vss_Wsd,
    /* 0x5b */ iemOp_cvtdq2ps_Vps_Wdq__cvtps2dq_Vdq_Wps__cvtps2dq_Vdq_Wps,
    /* 0x5c */ iemOp_subps_Vps_Wps__subpd_Vps_Wdp__subss_Vss_Wss__subsd_Vsd_Wsd,
    /* 0x5d */ iemOp_minps_Vps_Wps__minpd_Vpd_Wpd__minss_Vss_Wss__minsd_Vsd_Wsd,
    /* 0x5e */ iemOp_divps_Vps_Wps__divpd_Vpd_Wpd__divss_Vss_Wss__divsd_Vsd_Wsd,
    /* 0x5f */ iemOp_maxps_Vps_Wps__maxpd_Vpd_Wpd__maxss_Vss_Wss__maxsd_Vsd_Wsd,
    /* 0x60 */ iemOp_punpcklbw_Pq_Qd__punpcklbw_Vdq_Wdq,
    /* 0x61 */ iemOp_punpcklwd_Pq_Qd__punpcklwd_Vdq_Wdq,
    /* 0x62 */ iemOp_punpckldq_Pq_Qd__punpckldq_Vdq_Wdq,
    /* 0x63 */ iemOp_packsswb_Pq_Qq__packsswb_Vdq_Wdq,
    /* 0x64 */ iemOp_pcmpgtb_Pq_Qq__pcmpgtb_Vdq_Wdq,
    /* 0x65 */ iemOp_pcmpgtw_Pq_Qq__pcmpgtw_Vdq_Wdq,
    /* 0x66 */ iemOp_pcmpgtd_Pq_Qq__pcmpgtd_Vdq_Wdq,
    /* 0x67 */ iemOp_packuswb_Pq_Qq__packuswb_Vdq_Wdq,
    /* 0x68 */ iemOp_punpckhbw_Pq_Qq__punpckhbw_Vdq_Wdq,
    /* 0x69 */ iemOp_punpckhwd_Pq_Qd__punpckhwd_Vdq_Wdq,
    /* 0x6a */ iemOp_punpckhdq_Pq_Qd__punpckhdq_Vdq_Wdq,
    /* 0x6b */ iemOp_packssdw_Pq_Qd__packssdq_Vdq_Wdq,
    /* 0x6c */ iemOp_punpcklqdq_Vdq_Wdq,
    /* 0x6d */ iemOp_punpckhqdq_Vdq_Wdq,
    /* 0x6e */ iemOp_movd_q_Pd_Ey__movd_q_Vy_Ey,
    /* 0x6f */ iemOp_movq_Pq_Qq__movdqa_Vdq_Wdq__movdqu_Vdq_Wdq,
    /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib__pshufd_Vdq_Wdq_Ib__pshufhw_Vdq_Wdq_Ib__pshuflq_Vdq_Wdq_Ib,
    /* 0x71 */ iemOp_Grp12,
    /* 0x72 */ iemOp_Grp13,
    /* 0x73 */ iemOp_Grp14,
    /* 0x74 */ iemOp_pcmpeqb_Pq_Qq__pcmpeqb_Vdq_Wdq,
    /* 0x75 */ iemOp_pcmpeqw_Pq_Qq__pcmpeqw_Vdq_Wdq,
    /* 0x76 */ iemOp_pcmped_Pq_Qq__pcmpeqd_Vdq_Wdq,
    /* 0x77 */ iemOp_emms,
    /* 0x78 */ iemOp_vmread_AmdGrp17,
    /* 0x79 */ iemOp_vmwrite,
    /* 0x7a */ iemOp_Invalid,
    /* 0x7b */ iemOp_Invalid,
    /* 0x7c */ iemOp_haddpd_Vdp_Wpd__haddps_Vps_Wps,
    /* 0x7d */ iemOp_hsubpd_Vpd_Wpd__hsubps_Vps_Wps,
    /* 0x7e */ iemOp_movd_q_Ey_Pd__movd_q_Ey_Vy__movq_Vq_Wq,
    /* 0x7f */ iemOp_movq_Qq_Pq__movq_movdqa_Wdq_Vdq__movdqu_Wdq_Vdq,
    /* 0x80 */ iemOp_jo_Jv,
    /* 0x81 */ iemOp_jno_Jv,
    /* 0x82 */ iemOp_jc_Jv,
    /* 0x83 */ iemOp_jnc_Jv,
    /* 0x84 */ iemOp_je_Jv,
    /* 0x85 */ iemOp_jne_Jv,
    /* 0x86 */ iemOp_jbe_Jv,
    /* 0x87 */ iemOp_jnbe_Jv,
    /* 0x88 */ iemOp_js_Jv,
    /* 0x89 */ iemOp_jns_Jv,
    /* 0x8a */ iemOp_jp_Jv,
    /* 0x8b */ iemOp_jnp_Jv,
    /* 0x8c */ iemOp_jl_Jv,
    /* 0x8d */ iemOp_jnl_Jv,
    /* 0x8e */ iemOp_jle_Jv,
    /* 0x8f */ iemOp_jnle_Jv,
    /* 0x90 */ iemOp_seto_Eb,
    /* 0x91 */ iemOp_setno_Eb,
    /* 0x92 */ iemOp_setc_Eb,
    /* 0x93 */ iemOp_setnc_Eb,
    /* 0x94 */ iemOp_sete_Eb,
    /* 0x95 */ iemOp_setne_Eb,
    /* 0x96 */ iemOp_setbe_Eb,
    /* 0x97 */ iemOp_setnbe_Eb,
    /* 0x98 */ iemOp_sets_Eb,
    /* 0x99 */ iemOp_setns_Eb,
    /* 0x9a */ iemOp_setp_Eb,
    /* 0x9b */ iemOp_setnp_Eb,
    /* 0x9c */ iemOp_setl_Eb,
    /* 0x9d */ iemOp_setnl_Eb,
    /* 0x9e */ iemOp_setle_Eb,
    /* 0x9f */ iemOp_setnle_Eb,
    /* 0xa0 */ iemOp_push_fs,
    /* 0xa1 */ iemOp_pop_fs,
    /* 0xa2 */ iemOp_cpuid,
    /* 0xa3 */ iemOp_bt_Ev_Gv,
    /* 0xa4 */ iemOp_shld_Ev_Gv_Ib,
    /* 0xa5 */ iemOp_shld_Ev_Gv_CL,
    /* 0xa6 */ iemOp_Invalid,
    /* 0xa7 */ iemOp_Invalid,
    /* 0xa8 */ iemOp_push_gs,
    /* 0xa9 */ iemOp_pop_gs,
    /* 0xaa */ iemOp_rsm,
    /* 0xab */ iemOp_bts_Ev_Gv,
    /* 0xac */ iemOp_shrd_Ev_Gv_Ib,
    /* 0xad */ iemOp_shrd_Ev_Gv_CL,
    /* 0xae */ iemOp_Grp15,
    /* 0xaf */ iemOp_imul_Gv_Ev,
    /* 0xb0 */ iemOp_cmpxchg_Eb_Gb,
    /* 0xb1 */ iemOp_cmpxchg_Ev_Gv,
    /* 0xb2 */ iemOp_lss_Gv_Mp,
    /* 0xb3 */ iemOp_btr_Ev_Gv,
    /* 0xb4 */ iemOp_lfs_Gv_Mp,
    /* 0xb5 */ iemOp_lgs_Gv_Mp,
    /* 0xb6 */ iemOp_movzx_Gv_Eb,
    /* 0xb7 */ iemOp_movzx_Gv_Ew,
    /* 0xb8 */ iemOp_popcnt_Gv_Ev_jmpe,
    /* 0xb9 */ iemOp_Grp10,
    /* 0xba */ iemOp_Grp8,
    /* 0xbb */ iemOp_btc_Ev_Gv,
    /* 0xbc */ iemOp_bsf_Gv_Ev,
    /* 0xbd */ iemOp_bsr_Gv_Ev,
    /* 0xbe */ iemOp_movsx_Gv_Eb,
    /* 0xbf */ iemOp_movsx_Gv_Ew,
    /* 0xc0 */ iemOp_xadd_Eb_Gb,
    /* 0xc1 */ iemOp_xadd_Ev_Gv,
    /* 0xc2 */ iemOp_cmpps_Vps_Wps_Ib__cmppd_Vpd_Wpd_Ib__cmpss_Vss_Wss_Ib__cmpsd_Vsd_Wsd_Ib,
    /* 0xc3 */ iemOp_movnti_My_Gy,
    /* 0xc4 */ iemOp_pinsrw_Pq_Ry_Mw_Ib__pinsrw_Vdq_Ry_Mw_Ib,
    /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib__pextrw_Gd_Udq_Ib,
    /* 0xc6 */ iemOp_shufps_Vps_Wps_Ib__shufdp_Vpd_Wpd_Ib,
    /* 0xc7 */ iemOp_Grp9,
    /* 0xc8 */ iemOp_bswap_rAX_r8,
    /* 0xc9 */ iemOp_bswap_rCX_r9,
    /* 0xca */ iemOp_bswap_rDX_r10,
    /* 0xcb */ iemOp_bswap_rBX_r11,
    /* 0xcc */ iemOp_bswap_rSP_r12,
    /* 0xcd */ iemOp_bswap_rBP_r13,
    /* 0xce */ iemOp_bswap_rSI_r14,
    /* 0xcf */ iemOp_bswap_rDI_r15,
    /* 0xd0 */ iemOp_addsubpd_Vpd_Wpd__addsubps_Vps_Wps,
    /* 0xd1 */ iemOp_psrlw_Pp_Qp__psrlw_Vdp_Wdq,
    /* 0xd2 */ iemOp_psrld_Pq_Qq__psrld_Vdq_Wdq,
    /* 0xd3 */ iemOp_psrlq_Pq_Qq__psrlq_Vdq_Wdq,
    /* 0xd4 */ iemOp_paddq_Pq_Qq__paddq_Vdq_Wdq,
    /* 0xd5 */ iemOp_pmulq_Pq_Qq__pmullw_Vdq_Wdq,
    /* 0xd6 */ iemOp_movq_Wq_Vq__movq2dq_Vdq_Nq__movdq2q_Pq_Uq,
    /* 0xd7 */ iemOp_pmovmskb_Gd_Nq__pmovmskb_Gd_Udq,
    /* 0xd8 */ iemOp_psubusb_Pq_Qq__psubusb_Vdq_Wdq,
    /* 0xd9 */ iemOp_psubusw_Pq_Qq__psubusw_Vdq_Wdq,
    /* 0xda */ iemOp_pminub_Pq_Qq__pminub_Vdq_Wdq,
    /* 0xdb */ iemOp_pand_Pq_Qq__pand_Vdq_Wdq,
    /* 0xdc */ iemOp_paddusb_Pq_Qq__paddusb_Vdq_Wdq,
    /* 0xdd */ iemOp_paddusw_Pq_Qq__paddusw_Vdq_Wdq,
    /* 0xde */ iemOp_pmaxub_Pq_Qq__pamxub_Vdq_Wdq,
    /* 0xdf */ iemOp_pandn_Pq_Qq__pandn_Vdq_Wdq,
    /* 0xe0 */ iemOp_pavgb_Pq_Qq__pavgb_Vdq_Wdq,
    /* 0xe1 */ iemOp_psraw_Pq_Qq__psraw_Vdq_Wdq,
    /* 0xe2 */ iemOp_psrad_Pq_Qq__psrad_Vdq_Wdq,
    /* 0xe3 */ iemOp_pavgw_Pq_Qq__pavgw_Vdq_Wdq,
    /* 0xe4 */ iemOp_pmulhuw_Pq_Qq__pmulhuw_Vdq_Wdq,
    /* 0xe5 */ iemOp_pmulhw_Pq_Qq__pmulhw_Vdq_Wdq,
    /* 0xe6 */ iemOp_cvttpd2dq_Vdq_Wdp__cvtdq2pd_Vdq_Wpd__cvtpd2dq_Vdq_Wpd,
    /* 0xe7 */ iemOp_movntq_Mq_Pq__movntdq_Mdq_Vdq,
    /* 0xe8 */ iemOp_psubsb_Pq_Qq__psubsb_Vdq_Wdq,
    /* 0xe9 */ iemOp_psubsw_Pq_Qq__psubsw_Vdq_Wdq,
    /* 0xea */ iemOp_pminsw_Pq_Qq__pminsw_Vdq_Wdq,
    /* 0xeb */ iemOp_por_Pq_Qq__por_Vdq_Wdq,
    /* 0xec */ iemOp_paddsb_Pq_Qq__paddsb_Vdq_Wdq,
    /* 0xed */ iemOp_paddsw_Pq_Qq__paddsw_Vdq_Wdq,
    /* 0xee */ iemOp_pmaxsw_Pq_Qq__pmaxsw_Vdq_Wdq,
    /* 0xef */ iemOp_pxor_Pq_Qq__pxor_Vdq_Wdq,
    /* 0xf0 */ iemOp_lddqu_Vdq_Mdq,
    /* 0xf1 */ iemOp_psllw_Pq_Qq__pslw_Vdq_Wdq,
    /* 0xf2 */ iemOp_psld_Pq_Qq__pslld_Vdq_Wdq,
    /* 0xf3 */ iemOp_psllq_Pq_Qq__pslq_Vdq_Wdq,
    /* 0xf4 */ iemOp_pmuludq_Pq_Qq__pmuludq_Vdq_Wdq,
    /* 0xf5 */ iemOp_pmaddwd_Pq_Qq__pmaddwd_Vdq_Wdq,
    /* 0xf6 */ iemOp_psadbw_Pq_Qq__psadbw_Vdq_Wdq,
    /* 0xf7 */ iemOp_maskmovq_Pq_Nq__maskmovdqu_Vdq_Udq,
    /* 0xf8 */ iemOp_psubb_Pq_Qq_psubb_Vdq_Wdq,
    /* 0xf9 */ iemOp_psubw_Pq_Qq__psubw_Vdq_Wdq,
    /* 0xfa */ iemOp_psubd_Pq_Qq__psubd_Vdq_Wdq,
    /* 0xfb */ iemOp_psubq_Pq_Qq__psbuq_Vdq_Wdq,
    /* 0xfc */ iemOp_paddb_Pq_Qq__paddb_Vdq_Wdq,
    /* 0xfd */ iemOp_paddw_Pq_Qq__paddw_Vdq_Wdq,
    /* 0xfe */ iemOp_paddd_Pq_Qq__paddd_Vdq_Wdq,
    /* 0xff */ iemOp_Invalid
};
7241
7242/** @} */
7243
7244
7245/** @name One byte opcodes.
7246 *
7247 * @{
7248 */
7249
/** Opcode 0x00 - add Eb,Gb (byte r/m destination). */
FNIEMOP_DEF(iemOp_add_Eb_Gb)
{
    IEMOP_MNEMONIC("add Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_add);
}


/** Opcode 0x01 - add Ev,Gv (word/dword/qword r/m destination). */
FNIEMOP_DEF(iemOp_add_Ev_Gv)
{
    IEMOP_MNEMONIC("add Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_add);
}


/** Opcode 0x02 - add Gb,Eb (byte register destination). */
FNIEMOP_DEF(iemOp_add_Gb_Eb)
{
    IEMOP_MNEMONIC("add Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_add);
}


/** Opcode 0x03 - add Gv,Ev (word/dword/qword register destination). */
FNIEMOP_DEF(iemOp_add_Gv_Ev)
{
    IEMOP_MNEMONIC("add Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_add);
}


/** Opcode 0x04 - add AL,Ib (immediate byte into AL). */
FNIEMOP_DEF(iemOp_add_Al_Ib)
{
    IEMOP_MNEMONIC("add al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_add);
}


/** Opcode 0x05 - add rAX,Iz (immediate into the accumulator). */
FNIEMOP_DEF(iemOp_add_eAX_Iz)
{
    IEMOP_MNEMONIC("add rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_add);
}


/** Opcode 0x06 - push es. */
FNIEMOP_DEF(iemOp_push_ES)
{
    IEMOP_MNEMONIC("push es");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_ES);
}


/** Opcode 0x07 - pop es.  Invalid in 64-bit mode. */
FNIEMOP_DEF(iemOp_pop_ES)
{
    IEMOP_MNEMONIC("pop es");
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* Segment register loads can fault, so defer to a C implementation. */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_ES, pIemCpu->enmEffOpSize);
}
7314
7315
/** Opcode 0x08 - or Eb,Gb.  AF is left undefined, so exclude it from result verification. */
FNIEMOP_DEF(iemOp_or_Eb_Gb)
{
    IEMOP_MNEMONIC("or  Eb,Gb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_or);
}


/** Opcode 0x09 - or Ev,Gv.  AF undefined. */
FNIEMOP_DEF(iemOp_or_Ev_Gv)
{
    IEMOP_MNEMONIC("or  Ev,Gv ");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_or);
}


/** Opcode 0x0a - or Gb,Eb.  AF undefined. */
FNIEMOP_DEF(iemOp_or_Gb_Eb)
{
    IEMOP_MNEMONIC("or  Gb,Eb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_or);
}


/** Opcode 0x0b - or Gv,Ev.  AF undefined. */
FNIEMOP_DEF(iemOp_or_Gv_Ev)
{
    IEMOP_MNEMONIC("or  Gv,Ev");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_or);
}


/** Opcode 0x0c - or AL,Ib.  AF undefined. */
FNIEMOP_DEF(iemOp_or_Al_Ib)
{
    IEMOP_MNEMONIC("or  al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_or);
}


/** Opcode 0x0d - or rAX,Iz.  AF undefined. */
FNIEMOP_DEF(iemOp_or_eAX_Iz)
{
    IEMOP_MNEMONIC("or  rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_or);
}


/** Opcode 0x0e - push cs. */
FNIEMOP_DEF(iemOp_push_CS)
{
    IEMOP_MNEMONIC("push cs");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_CS);
}


/**
 * Opcode 0x0f - two byte opcode escape.  Fetches the second opcode byte and
 * dispatches through g_apfnTwoByteMap.
 */
FNIEMOP_DEF(iemOp_2byteEscape)
{
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    /** @todo PUSH CS on 8086, undefined on 80186. */
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL(g_apfnTwoByteMap[b]);
}
7386
/** Opcode 0x10 - adc Eb,Gb (add with carry, byte r/m destination). */
FNIEMOP_DEF(iemOp_adc_Eb_Gb)
{
    IEMOP_MNEMONIC("adc Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_adc);
}


/** Opcode 0x11 - adc Ev,Gv. */
FNIEMOP_DEF(iemOp_adc_Ev_Gv)
{
    IEMOP_MNEMONIC("adc Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_adc);
}


/** Opcode 0x12 - adc Gb,Eb. */
FNIEMOP_DEF(iemOp_adc_Gb_Eb)
{
    IEMOP_MNEMONIC("adc Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_adc);
}


/** Opcode 0x13 - adc Gv,Ev. */
FNIEMOP_DEF(iemOp_adc_Gv_Ev)
{
    IEMOP_MNEMONIC("adc Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_adc);
}


/** Opcode 0x14 - adc AL,Ib. */
FNIEMOP_DEF(iemOp_adc_Al_Ib)
{
    IEMOP_MNEMONIC("adc al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_adc);
}


/** Opcode 0x15 - adc rAX,Iz. */
FNIEMOP_DEF(iemOp_adc_eAX_Iz)
{
    IEMOP_MNEMONIC("adc rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_adc);
}


/** Opcode 0x16 - push ss. */
FNIEMOP_DEF(iemOp_push_SS)
{
    IEMOP_MNEMONIC("push ss");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_SS);
}


/** Opcode 0x17 - pop ss.  Invalid in 64-bit mode. */
FNIEMOP_DEF(iemOp_pop_SS)
{
    IEMOP_MNEMONIC("pop ss"); /** @todo implies instruction fusing? */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_SS, pIemCpu->enmEffOpSize);
}
7451
7452
/** Opcode 0x18 - sbb Eb,Gb (subtract with borrow, byte r/m destination). */
FNIEMOP_DEF(iemOp_sbb_Eb_Gb)
{
    IEMOP_MNEMONIC("sbb Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sbb);
}


/** Opcode 0x19 - sbb Ev,Gv. */
FNIEMOP_DEF(iemOp_sbb_Ev_Gv)
{
    IEMOP_MNEMONIC("sbb Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sbb);
}


/** Opcode 0x1a - sbb Gb,Eb. */
FNIEMOP_DEF(iemOp_sbb_Gb_Eb)
{
    IEMOP_MNEMONIC("sbb Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sbb);
}


/** Opcode 0x1b - sbb Gv,Ev. */
FNIEMOP_DEF(iemOp_sbb_Gv_Ev)
{
    IEMOP_MNEMONIC("sbb Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sbb);
}


/** Opcode 0x1c - sbb AL,Ib. */
FNIEMOP_DEF(iemOp_sbb_Al_Ib)
{
    IEMOP_MNEMONIC("sbb al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sbb);
}


/** Opcode 0x1d - sbb rAX,Iz. */
FNIEMOP_DEF(iemOp_sbb_eAX_Iz)
{
    IEMOP_MNEMONIC("sbb rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sbb);
}


/** Opcode 0x1e - push ds. */
FNIEMOP_DEF(iemOp_push_DS)
{
    IEMOP_MNEMONIC("push ds");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_DS);
}


/** Opcode 0x1f - pop ds.  Invalid in 64-bit mode. */
FNIEMOP_DEF(iemOp_pop_DS)
{
    IEMOP_MNEMONIC("pop ds");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_DS, pIemCpu->enmEffOpSize);
}
7517
7518
/** Opcode 0x20 - and Eb,Gb.  AF is left undefined, so exclude it from verification. */
FNIEMOP_DEF(iemOp_and_Eb_Gb)
{
    IEMOP_MNEMONIC("and Eb,Gb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_and);
}


/** Opcode 0x21 - and Ev,Gv.  AF undefined. */
FNIEMOP_DEF(iemOp_and_Ev_Gv)
{
    IEMOP_MNEMONIC("and Ev,Gv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_and);
}


/** Opcode 0x22 - and Gb,Eb.  AF undefined. */
FNIEMOP_DEF(iemOp_and_Gb_Eb)
{
    IEMOP_MNEMONIC("and Gb,Eb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_and);
}


/** Opcode 0x23 - and Gv,Ev.  AF undefined. */
FNIEMOP_DEF(iemOp_and_Gv_Ev)
{
    IEMOP_MNEMONIC("and Gv,Ev");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_and);
}


/** Opcode 0x24 - and AL,Ib.  AF undefined. */
FNIEMOP_DEF(iemOp_and_Al_Ib)
{
    IEMOP_MNEMONIC("and al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_and);
}


/** Opcode 0x25 - and rAX,Iz.  AF undefined. */
FNIEMOP_DEF(iemOp_and_eAX_Iz)
{
    IEMOP_MNEMONIC("and rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_and);
}


/** Opcode 0x26 - ES segment override prefix; recurses into the one byte map. */
FNIEMOP_DEF(iemOp_seg_ES)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg es");
    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_ES;
    pIemCpu->iEffSeg    = X86_SREG_ES;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}


/** Opcode 0x27 - daa.  Invalid in 64-bit mode; OF is left undefined. */
FNIEMOP_DEF(iemOp_daa)
{
    IEMOP_MNEMONIC("daa AL");
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_daa);
}
7594
7595
/** Opcode 0x28 - sub Eb,Gb (byte r/m destination). */
FNIEMOP_DEF(iemOp_sub_Eb_Gb)
{
    IEMOP_MNEMONIC("sub Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sub);
}


/** Opcode 0x29 - sub Ev,Gv. */
FNIEMOP_DEF(iemOp_sub_Ev_Gv)
{
    IEMOP_MNEMONIC("sub Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sub);
}


/** Opcode 0x2a - sub Gb,Eb. */
FNIEMOP_DEF(iemOp_sub_Gb_Eb)
{
    IEMOP_MNEMONIC("sub Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sub);
}


/** Opcode 0x2b - sub Gv,Ev. */
FNIEMOP_DEF(iemOp_sub_Gv_Ev)
{
    IEMOP_MNEMONIC("sub Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sub);
}


/** Opcode 0x2c - sub AL,Ib. */
FNIEMOP_DEF(iemOp_sub_Al_Ib)
{
    IEMOP_MNEMONIC("sub al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sub);
}


/** Opcode 0x2d - sub rAX,Iz. */
FNIEMOP_DEF(iemOp_sub_eAX_Iz)
{
    IEMOP_MNEMONIC("sub rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sub);
}


/** Opcode 0x2e - CS segment override prefix; recurses into the one byte map. */
FNIEMOP_DEF(iemOp_seg_CS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg cs");
    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_CS;
    pIemCpu->iEffSeg    = X86_SREG_CS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}


/** Opcode 0x2f - das.  Invalid in 64-bit mode; OF is left undefined. */
FNIEMOP_DEF(iemOp_das)
{
    IEMOP_MNEMONIC("das AL");
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_das);
}
7665
7666
/** Opcode 0x30 - xor Eb,Gb.  AF is left undefined, so exclude it from verification. */
FNIEMOP_DEF(iemOp_xor_Eb_Gb)
{
    IEMOP_MNEMONIC("xor Eb,Gb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_xor);
}


/** Opcode 0x31 - xor Ev,Gv.  AF undefined. */
FNIEMOP_DEF(iemOp_xor_Ev_Gv)
{
    IEMOP_MNEMONIC("xor Ev,Gv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_xor);
}


/** Opcode 0x32 - xor Gb,Eb.  AF undefined. */
FNIEMOP_DEF(iemOp_xor_Gb_Eb)
{
    IEMOP_MNEMONIC("xor Gb,Eb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_xor);
}


/** Opcode 0x33 - xor Gv,Ev.  AF undefined. */
FNIEMOP_DEF(iemOp_xor_Gv_Ev)
{
    IEMOP_MNEMONIC("xor Gv,Ev");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_xor);
}


/** Opcode 0x34 - xor AL,Ib.  AF undefined. */
FNIEMOP_DEF(iemOp_xor_Al_Ib)
{
    IEMOP_MNEMONIC("xor al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_xor);
}


/** Opcode 0x35 - xor rAX,Iz.  AF undefined. */
FNIEMOP_DEF(iemOp_xor_eAX_Iz)
{
    IEMOP_MNEMONIC("xor rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_xor);
}


/** Opcode 0x36 - SS segment override prefix; recurses into the one byte map. */
FNIEMOP_DEF(iemOp_seg_SS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ss");
    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_SS;
    pIemCpu->iEffSeg    = X86_SREG_SS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}


/** Opcode 0x37 - aaa.  Stubbed, not yet implemented. */
FNIEMOP_STUB(iemOp_aaa);
7735
7736
/** Opcode 0x38 - cmp Eb,Gb (compare only, no destination write). */
FNIEMOP_DEF(iemOp_cmp_Eb_Gb)
{
    IEMOP_MNEMONIC("cmp Eb,Gb");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo do we have to decode the whole instruction first?  */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_cmp);
}


/** Opcode 0x39 - cmp Ev,Gv. */
FNIEMOP_DEF(iemOp_cmp_Ev_Gv)
{
    IEMOP_MNEMONIC("cmp Ev,Gv");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo do we have to decode the whole instruction first?  */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_cmp);
}


/** Opcode 0x3a - cmp Gb,Eb. */
FNIEMOP_DEF(iemOp_cmp_Gb_Eb)
{
    IEMOP_MNEMONIC("cmp Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_cmp);
}


/** Opcode 0x3b - cmp Gv,Ev. */
FNIEMOP_DEF(iemOp_cmp_Gv_Ev)
{
    IEMOP_MNEMONIC("cmp Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_cmp);
}


/** Opcode 0x3c - cmp AL,Ib. */
FNIEMOP_DEF(iemOp_cmp_Al_Ib)
{
    IEMOP_MNEMONIC("cmp al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_cmp);
}


/** Opcode 0x3d - cmp rAX,Iz. */
FNIEMOP_DEF(iemOp_cmp_eAX_Iz)
{
    IEMOP_MNEMONIC("cmp rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_cmp);
}


/** Opcode 0x3e - DS segment override prefix; recurses into the one byte map. */
FNIEMOP_DEF(iemOp_seg_DS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ds");
    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_DS;
    pIemCpu->iEffSeg    = X86_SREG_DS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}


/** Opcode 0x3f - aas.  Stubbed, not yet implemented. */
FNIEMOP_STUB(iemOp_aas);
7801
7802/**
7803 * Common 'inc/dec/not/neg register' helper.
7804 */
7805FNIEMOP_DEF_2(iemOpCommonUnaryGReg, PCIEMOPUNARYSIZES, pImpl, uint8_t, iReg)
7806{
7807 IEMOP_HLP_NO_LOCK_PREFIX();
7808 switch (pIemCpu->enmEffOpSize)
7809 {
7810 case IEMMODE_16BIT:
7811 IEM_MC_BEGIN(2, 0);
7812 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7813 IEM_MC_ARG(uint32_t *, pEFlags, 1);
7814 IEM_MC_REF_GREG_U16(pu16Dst, iReg);
7815 IEM_MC_REF_EFLAGS(pEFlags);
7816 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
7817 IEM_MC_ADVANCE_RIP();
7818 IEM_MC_END();
7819 return VINF_SUCCESS;
7820
7821 case IEMMODE_32BIT:
7822 IEM_MC_BEGIN(2, 0);
7823 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7824 IEM_MC_ARG(uint32_t *, pEFlags, 1);
7825 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
7826 IEM_MC_REF_EFLAGS(pEFlags);
7827 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
7828 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7829 IEM_MC_ADVANCE_RIP();
7830 IEM_MC_END();
7831 return VINF_SUCCESS;
7832
7833 case IEMMODE_64BIT:
7834 IEM_MC_BEGIN(2, 0);
7835 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7836 IEM_MC_ARG(uint32_t *, pEFlags, 1);
7837 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
7838 IEM_MC_REF_EFLAGS(pEFlags);
7839 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
7840 IEM_MC_ADVANCE_RIP();
7841 IEM_MC_END();
7842 return VINF_SUCCESS;
7843 }
7844 return VINF_SUCCESS;
7845}
7846
7847
7848/** Opcode 0x40. */
7849FNIEMOP_DEF(iemOp_inc_eAX)
7850{
7851 /*
7852 * This is a REX prefix in 64-bit mode.
7853 */
7854 if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
7855 {
7856 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex");
7857 pIemCpu->fPrefixes |= IEM_OP_PRF_REX;
7858
7859 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
7860 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
7861 }
7862
7863 IEMOP_MNEMONIC("inc eAX");
7864 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xAX);
7865}
7866
7867
7868/** Opcode 0x41. */
7869FNIEMOP_DEF(iemOp_inc_eCX)
7870{
7871 /*
7872 * This is a REX prefix in 64-bit mode.
7873 */
7874 if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
7875 {
7876 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.b");
7877 pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B;
7878 pIemCpu->uRexB = 1 << 3;
7879
7880 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
7881 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
7882 }
7883
7884 IEMOP_MNEMONIC("inc eCX");
7885 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xCX);
7886}
7887
7888
7889/** Opcode 0x42. */
7890FNIEMOP_DEF(iemOp_inc_eDX)
7891{
7892 /*
7893 * This is a REX prefix in 64-bit mode.
7894 */
7895 if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
7896 {
7897 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.x");
7898 pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X;
7899 pIemCpu->uRexIndex = 1 << 3;
7900
7901 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
7902 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
7903 }
7904
7905 IEMOP_MNEMONIC("inc eDX");
7906 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDX);
7907}
7908
7909
7910
7911/** Opcode 0x43. */
7912FNIEMOP_DEF(iemOp_inc_eBX)
7913{
7914 /*
7915 * This is a REX prefix in 64-bit mode.
7916 */
7917 if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
7918 {
7919 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bx");
7920 pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
7921 pIemCpu->uRexB = 1 << 3;
7922 pIemCpu->uRexIndex = 1 << 3;
7923
7924 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
7925 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
7926 }
7927
7928 IEMOP_MNEMONIC("inc eBX");
7929 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBX);
7930}
7931
7932
7933/** Opcode 0x44. */
7934FNIEMOP_DEF(iemOp_inc_eSP)
7935{
7936 /*
7937 * This is a REX prefix in 64-bit mode.
7938 */
7939 if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
7940 {
7941 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.r");
7942 pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R;
7943 pIemCpu->uRexReg = 1 << 3;
7944
7945 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
7946 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
7947 }
7948
7949 IEMOP_MNEMONIC("inc eSP");
7950 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSP);
7951}
7952
7953
/** Opcode 0x45. */
FNIEMOP_DEF(iemOp_inc_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     *
     * Opcode 0x45 encodes REX.R + REX.B: record both extension bits, then
     * fetch the next byte and restart opcode decoding.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rb");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B;
        pIemCpu->uRexReg = 1 << 3;
        pIemCpu->uRexB = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode: INC BP / INC EBP. */
    IEMOP_MNEMONIC("inc eBP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBP);
}
7974
7975
/** Opcode 0x46. */
FNIEMOP_DEF(iemOp_inc_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     *
     * Opcode 0x46 encodes REX.R + REX.X: record both extension bits, then
     * fetch the next byte and restart opcode decoding.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rx");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X;
        pIemCpu->uRexReg = 1 << 3;
        pIemCpu->uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode: INC SI / INC ESI. */
    IEMOP_MNEMONIC("inc eSI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSI);
}
7996
7997
/** Opcode 0x47. */
FNIEMOP_DEF(iemOp_inc_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     *
     * Opcode 0x47 encodes REX.R + REX.B + REX.X: record all three extension
     * bits, then fetch the next byte and restart opcode decoding.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbx");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pIemCpu->uRexReg = 1 << 3;
        pIemCpu->uRexB = 1 << 3;
        pIemCpu->uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode: INC DI / INC EDI. */
    IEMOP_MNEMONIC("inc eDI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDI);
}
8019
8020
/** Opcode 0x48. */
FNIEMOP_DEF(iemOp_dec_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     *
     * Opcode 0x48 encodes REX.W: record the 64-bit operand-size flag and
     * re-derive the effective operand size before re-dispatching.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.w");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_SIZE_REX_W;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode: DEC AX / DEC EAX. */
    IEMOP_MNEMONIC("dec eAX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xAX);
}
8040
8041
/** Opcode 0x49. */
FNIEMOP_DEF(iemOp_dec_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     *
     * Opcode 0x49 encodes REX.B + REX.W; effective operand size must be
     * recalculated after setting the W flag.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bw");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexB = 1 << 3;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode: DEC CX / DEC ECX. */
    IEMOP_MNEMONIC("dec eCX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xCX);
}
8062
8063
/** Opcode 0x4a. */
FNIEMOP_DEF(iemOp_dec_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     *
     * Opcode 0x4a encodes REX.X + REX.W; effective operand size must be
     * recalculated after setting the W flag.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.xw");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexIndex = 1 << 3;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode: DEC DX / DEC EDX. */
    IEMOP_MNEMONIC("dec eDX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDX);
}
8084
8085
/** Opcode 0x4b. */
FNIEMOP_DEF(iemOp_dec_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     *
     * Opcode 0x4b encodes REX.B + REX.X + REX.W; effective operand size must
     * be recalculated after setting the W flag.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bxw");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexB = 1 << 3;
        pIemCpu->uRexIndex = 1 << 3;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode: DEC BX / DEC EBX. */
    IEMOP_MNEMONIC("dec eBX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBX);
}
8107
8108
/** Opcode 0x4c. */
FNIEMOP_DEF(iemOp_dec_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     *
     * Opcode 0x4c encodes REX.R + REX.W; effective operand size must be
     * recalculated after setting the W flag.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rw");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexReg = 1 << 3;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode: DEC SP / DEC ESP. */
    IEMOP_MNEMONIC("dec eSP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSP);
}
8129
8130
/** Opcode 0x4d. */
FNIEMOP_DEF(iemOp_dec_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     *
     * Opcode 0x4d encodes REX.R + REX.B + REX.W; effective operand size must
     * be recalculated after setting the W flag.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbw");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexReg = 1 << 3;
        pIemCpu->uRexB = 1 << 3;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode: DEC BP / DEC EBP. */
    IEMOP_MNEMONIC("dec eBP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBP);
}
8152
8153
/** Opcode 0x4e. */
FNIEMOP_DEF(iemOp_dec_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     *
     * Opcode 0x4e encodes REX.R + REX.X + REX.W; effective operand size must
     * be recalculated after setting the W flag.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rxw");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexReg = 1 << 3;
        pIemCpu->uRexIndex = 1 << 3;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode: DEC SI / DEC ESI. */
    IEMOP_MNEMONIC("dec eSI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSI);
}
8175
8176
/** Opcode 0x4f. */
FNIEMOP_DEF(iemOp_dec_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     *
     * Opcode 0x4f encodes all four REX bits (R, B, X, W); effective operand
     * size must be recalculated after setting the W flag.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbxw");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexReg = 1 << 3;
        pIemCpu->uRexB = 1 << 3;
        pIemCpu->uRexIndex = 1 << 3;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode: DEC DI / DEC EDI. */
    IEMOP_MNEMONIC("dec eDI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDI);
}
8199
8200
8201/**
8202 * Common 'push register' helper.
8203 */
8204FNIEMOP_DEF_1(iemOpCommonPushGReg, uint8_t, iReg)
8205{
8206 IEMOP_HLP_NO_LOCK_PREFIX();
8207 if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
8208 {
8209 iReg |= pIemCpu->uRexB;
8210 pIemCpu->enmDefOpSize = IEMMODE_64BIT;
8211 pIemCpu->enmEffOpSize = !(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
8212 }
8213
8214 switch (pIemCpu->enmEffOpSize)
8215 {
8216 case IEMMODE_16BIT:
8217 IEM_MC_BEGIN(0, 1);
8218 IEM_MC_LOCAL(uint16_t, u16Value);
8219 IEM_MC_FETCH_GREG_U16(u16Value, iReg);
8220 IEM_MC_PUSH_U16(u16Value);
8221 IEM_MC_ADVANCE_RIP();
8222 IEM_MC_END();
8223 break;
8224
8225 case IEMMODE_32BIT:
8226 IEM_MC_BEGIN(0, 1);
8227 IEM_MC_LOCAL(uint32_t, u32Value);
8228 IEM_MC_FETCH_GREG_U32(u32Value, iReg);
8229 IEM_MC_PUSH_U32(u32Value);
8230 IEM_MC_ADVANCE_RIP();
8231 IEM_MC_END();
8232 break;
8233
8234 case IEMMODE_64BIT:
8235 IEM_MC_BEGIN(0, 1);
8236 IEM_MC_LOCAL(uint64_t, u64Value);
8237 IEM_MC_FETCH_GREG_U64(u64Value, iReg);
8238 IEM_MC_PUSH_U64(u64Value);
8239 IEM_MC_ADVANCE_RIP();
8240 IEM_MC_END();
8241 break;
8242 }
8243
8244 return VINF_SUCCESS;
8245}
8246
8247
/** Opcode 0x50. PUSH rAX (rAX = R8 with REX.B). */
FNIEMOP_DEF(iemOp_push_eAX)
{
    IEMOP_MNEMONIC("push rAX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xAX);
}
8254
8255
/** Opcode 0x51. PUSH rCX (rCX = R9 with REX.B). */
FNIEMOP_DEF(iemOp_push_eCX)
{
    IEMOP_MNEMONIC("push rCX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xCX);
}
8262
8263
/** Opcode 0x52. PUSH rDX (rDX = R10 with REX.B). */
FNIEMOP_DEF(iemOp_push_eDX)
{
    IEMOP_MNEMONIC("push rDX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDX);
}
8270
8271
/** Opcode 0x53. PUSH rBX (rBX = R11 with REX.B). */
FNIEMOP_DEF(iemOp_push_eBX)
{
    IEMOP_MNEMONIC("push rBX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBX);
}
8278
8279
/** Opcode 0x54.
 *
 * PUSH rSP.  On the 8086 target CPU this has the classic quirk of pushing the
 * value of SP *after* the implicit 2-byte decrement (SP - 2), whereas all
 * later CPUs push the pre-decrement value via the common helper.
 *
 * NOTE(review): the 8086 path relies on the IEM_MC sequence terminating the
 * function (otherwise it would fall through into the common push below) —
 * presumably IEM_MC_ADVANCE_RIP/IEM_MC_END return; confirm against the macro
 * definitions in IEMAll.cpp.
 */
FNIEMOP_DEF(iemOp_push_eSP)
{
    IEMOP_MNEMONIC("push rSP");
    if (IEM_GET_TARGET_CPU(pIemCpu) == IEMTARGETCPU_8086)
    {
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint16_t, u16Value);
        IEM_MC_FETCH_GREG_U16(u16Value, X86_GREG_xSP);
        IEM_MC_SUB_LOCAL_U16(u16Value, 2);
        IEM_MC_PUSH_U16(u16Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSP);
}
8296
8297
/** Opcode 0x55. PUSH rBP (rBP = R13 with REX.B). */
FNIEMOP_DEF(iemOp_push_eBP)
{
    IEMOP_MNEMONIC("push rBP");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBP);
}
8304
8305
/** Opcode 0x56. PUSH rSI (rSI = R14 with REX.B). */
FNIEMOP_DEF(iemOp_push_eSI)
{
    IEMOP_MNEMONIC("push rSI");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSI);
}
8312
8313
/** Opcode 0x57. PUSH rDI (rDI = R15 with REX.B). */
FNIEMOP_DEF(iemOp_push_eDI)
{
    IEMOP_MNEMONIC("push rDI");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDI);
}
8320
8321
8322/**
8323 * Common 'pop register' helper.
8324 */
8325FNIEMOP_DEF_1(iemOpCommonPopGReg, uint8_t, iReg)
8326{
8327 IEMOP_HLP_NO_LOCK_PREFIX();
8328 if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
8329 {
8330 iReg |= pIemCpu->uRexB;
8331 pIemCpu->enmDefOpSize = IEMMODE_64BIT;
8332 pIemCpu->enmEffOpSize = !(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
8333 }
8334
8335 switch (pIemCpu->enmEffOpSize)
8336 {
8337 case IEMMODE_16BIT:
8338 IEM_MC_BEGIN(0, 1);
8339 IEM_MC_LOCAL(uint16_t *, pu16Dst);
8340 IEM_MC_REF_GREG_U16(pu16Dst, iReg);
8341 IEM_MC_POP_U16(pu16Dst);
8342 IEM_MC_ADVANCE_RIP();
8343 IEM_MC_END();
8344 break;
8345
8346 case IEMMODE_32BIT:
8347 IEM_MC_BEGIN(0, 1);
8348 IEM_MC_LOCAL(uint32_t *, pu32Dst);
8349 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
8350 IEM_MC_POP_U32(pu32Dst);
8351 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /** @todo testcase*/
8352 IEM_MC_ADVANCE_RIP();
8353 IEM_MC_END();
8354 break;
8355
8356 case IEMMODE_64BIT:
8357 IEM_MC_BEGIN(0, 1);
8358 IEM_MC_LOCAL(uint64_t *, pu64Dst);
8359 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
8360 IEM_MC_POP_U64(pu64Dst);
8361 IEM_MC_ADVANCE_RIP();
8362 IEM_MC_END();
8363 break;
8364 }
8365
8366 return VINF_SUCCESS;
8367}
8368
8369
/** Opcode 0x58. POP rAX (rAX = R8 with REX.B). */
FNIEMOP_DEF(iemOp_pop_eAX)
{
    IEMOP_MNEMONIC("pop rAX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xAX);
}
8376
8377
/** Opcode 0x59. POP rCX (rCX = R9 with REX.B). */
FNIEMOP_DEF(iemOp_pop_eCX)
{
    IEMOP_MNEMONIC("pop rCX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xCX);
}
8384
8385
/** Opcode 0x5a. POP rDX (rDX = R10 with REX.B). */
FNIEMOP_DEF(iemOp_pop_eDX)
{
    IEMOP_MNEMONIC("pop rDX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDX);
}
8392
8393
/** Opcode 0x5b. POP rBX (rBX = R11 with REX.B). */
FNIEMOP_DEF(iemOp_pop_eBX)
{
    IEMOP_MNEMONIC("pop rBX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBX);
}
8400
8401
/** Opcode 0x5c.
 *
 * POP rSP gets special handling because the popped value must be read before
 * it is written to the stack pointer (the common helper pops through a
 * reference, which would not give the documented POP SP semantics).  With
 * REX.B the opcode addresses R12 instead and the common helper is used.
 */
FNIEMOP_DEF(iemOp_pop_eSP)
{
    IEMOP_MNEMONIC("pop rSP");
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        if (pIemCpu->uRexB)
            return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSP);
        /* Same operand-size defaulting as iemOpCommonPopGReg does for long mode. */
        pIemCpu->enmDefOpSize = IEMMODE_64BIT;
        pIemCpu->enmEffOpSize = !(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
                           DISOPTYPE_HARMLESS | DISOPTYPE_DEFAULT_64_OP_SIZE | DISOPTYPE_REXB_EXTENDS_OPREG);
    /** @todo add testcase for this instruction. */
    /* Pop into a local first, then store it to xSP. */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Dst);
            IEM_MC_POP_U16(&u16Dst); /** @todo not correct MC, fix later. */
            IEM_MC_STORE_GREG_U16(X86_GREG_xSP, u16Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Dst);
            IEM_MC_POP_U32(&u32Dst);
            IEM_MC_STORE_GREG_U32(X86_GREG_xSP, u32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Dst);
            IEM_MC_POP_U64(&u64Dst);
            IEM_MC_STORE_GREG_U64(X86_GREG_xSP, u64Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
8449
8450
/** Opcode 0x5d. POP rBP (rBP = R13 with REX.B). */
FNIEMOP_DEF(iemOp_pop_eBP)
{
    IEMOP_MNEMONIC("pop rBP");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBP);
}
8457
8458
/** Opcode 0x5e. POP rSI (rSI = R14 with REX.B). */
FNIEMOP_DEF(iemOp_pop_eSI)
{
    IEMOP_MNEMONIC("pop rSI");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSI);
}
8465
8466
/** Opcode 0x5f. POP rDI (rDI = R15 with REX.B). */
FNIEMOP_DEF(iemOp_pop_eDI)
{
    IEMOP_MNEMONIC("pop rDI");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDI);
}
8473
8474
/** Opcode 0x60.
 * PUSHA/PUSHAD - push all general registers.  Requires a 186+ target CPU and
 * is invalid in 64-bit mode; dispatched to a C implementation by operand size.
 */
FNIEMOP_DEF(iemOp_pusha)
{
    IEMOP_MNEMONIC("pusha");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_NO_64BIT();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_16);
    Assert(pIemCpu->enmEffOpSize == IEMMODE_32BIT);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_32);
}
8486
8487
/** Opcode 0x61.
 * POPA/POPAD - pop all general registers.  Requires a 186+ target CPU and is
 * invalid in 64-bit mode; dispatched to a C implementation by operand size.
 */
FNIEMOP_DEF(iemOp_popa)
{
    IEMOP_MNEMONIC("popa");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_NO_64BIT();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_16);
    Assert(pIemCpu->enmEffOpSize == IEMMODE_32BIT);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_32);
}
8499
8500
/** Opcode 0x62.
 * BOUND Gv,Ma (186+) / EVEX prefix — not implemented yet; FNIEMOP_STUB
 * presumably generates a failing placeholder decoder (TODO confirm). */
FNIEMOP_STUB(iemOp_bound_Gv_Ma_evex);
// IEMOP_HLP_MIN_186();
8504
8505
/** Opcode 0x63 - non-64-bit modes.
 *
 * ARPL Ew,Gw - adjust the RPL field of the destination selector word to that
 * of the source; the flag updates are done by the iemAImpl_arpl worker.
 * Requires a 286+ target CPU and protected mode (no real/V86 mode).
 */
FNIEMOP_DEF(iemOp_arpl_Ew_Gw)
{
    IEMOP_MNEMONIC("arpl Ew,Gw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register destination: reference the register directly. */
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint16_t *, pu16Dst, 0);
        IEM_MC_ARG(uint16_t, u16Src, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* Memory destination: map read-write, operate, commit and unmap. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *, pu16Dst, 0);
        IEM_MC_ARG(uint16_t, u16Src, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;

}
8555
8556
/** Opcode 0x63.
 * MOVSXD Gv,Ev - sign-extend a 32-bit source into a 64-bit destination.
 * @note This is a weird one. It works like a regular move instruction if
 *       REX.W isn't set, at least according to AMD docs (rev 3.15, 2009-11).
 * @todo This definitely needs a testcase to verify the odd cases. */
FNIEMOP_DEF(iemOp_movsxd_Gv_Ev)
{
    Assert(pIemCpu->enmEffOpSize == IEMMODE_64BIT); /* Caller branched already . */

    IEMOP_MNEMONIC("movsxd Gv,Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register to register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, u64Value);
        IEM_MC_FETCH_GREG_U32_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, u64Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U32_SX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
8598
8599
/** Opcode 0x64.
 * FS segment-override prefix (386+): record the prefix, make FS the effective
 * data segment, then fetch the next byte and restart opcode decoding. */
FNIEMOP_DEF(iemOp_seg_FS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg fs");
    IEMOP_HLP_MIN_386();

    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_FS;
    pIemCpu->iEffSeg = X86_SREG_FS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
8612
8613
/** Opcode 0x65.
 * GS segment-override prefix (386+): record the prefix, make GS the effective
 * data segment, then fetch the next byte and restart opcode decoding. */
FNIEMOP_DEF(iemOp_seg_GS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg gs");
    IEMOP_HLP_MIN_386();

    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_GS;
    pIemCpu->iEffSeg = X86_SREG_GS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
8626
8627
/** Opcode 0x66.
 * Operand-size override prefix (386+): record the prefix, recalculate the
 * effective operand size, then restart decoding with the next byte. */
FNIEMOP_DEF(iemOp_op_size)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("op size");
    IEMOP_HLP_MIN_386();

    pIemCpu->fPrefixes |= IEM_OP_PRF_SIZE_OP;
    iemRecalEffOpSize(pIemCpu);

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
8640
8641
/** Opcode 0x67.
 * Address-size override prefix (386+): toggles 16/32-bit addressing outside
 * long mode; in long mode it selects 32-bit instead of 64-bit addressing. */
FNIEMOP_DEF(iemOp_addr_size)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("addr size");
    IEMOP_HLP_MIN_386();

    pIemCpu->fPrefixes |= IEM_OP_PRF_SIZE_ADDR;
    switch (pIemCpu->enmDefAddrMode)
    {
        case IEMMODE_16BIT: pIemCpu->enmEffAddrMode = IEMMODE_32BIT; break;
        case IEMMODE_32BIT: pIemCpu->enmEffAddrMode = IEMMODE_16BIT; break;
        case IEMMODE_64BIT: pIemCpu->enmEffAddrMode = IEMMODE_32BIT; break;
        default: AssertFailed();
    }

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
8660
8661
/** Opcode 0x68.
 * PUSH Iz - push an immediate (186+).  The immediate width follows the
 * effective operand size; in 64-bit mode a 32-bit immediate is sign-extended
 * to 64 bits before being pushed. */
FNIEMOP_DEF(iemOp_push_Iz)
{
    IEMOP_MNEMONIC("push Iz");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U16(u16Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U32(u32Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            /* Sign-extended 32-bit immediate. */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U64(u64Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
8706
8707
/** Opcode 0x69.
 * IMUL Gv,Ev,Iz (186+) - three-operand signed multiply: Gv = Ev * Iz.
 * SF/ZF/AF/PF are undefined after the operation (noted for verification).
 * For memory operands the immediate size is passed to the effective-address
 * calculation so RIP-relative addressing accounts for the trailing bytes. */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Iz)
{
    IEMOP_MNEMONIC("imul Gv,Ev,Iz"); /* Gv = Ev * Iz; */
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);

                IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand; 2 = size of the trailing imm16 */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);

                IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand; 4 = size of the trailing imm32 */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand; imm32 sign-extended to 64 bits */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);

                IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand; 4 = size of the trailing imm32 (sign-extended) */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }
    }
    AssertFailedReturn(VERR_IEM_IPE_9);
}
8867
8868
/** Opcode 0x6a.
 * PUSH Ib (186+) - push a sign-extended byte immediate at the current
 * effective operand size. */
FNIEMOP_DEF(iemOp_push_Ib)
{
    IEMOP_MNEMONIC("push Ib");
    IEMOP_HLP_MIN_186();
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0,0);
    /* The signed i8Imm is implicitly sign-extended to the push width. */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_PUSH_U16(i8Imm);
            break;
        case IEMMODE_32BIT:
            IEM_MC_PUSH_U32(i8Imm);
            break;
        case IEMMODE_64BIT:
            IEM_MC_PUSH_U64(i8Imm);
            break;
    }
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8895
8896
/** Opcode 0x6b.
 * IMUL Gv,Ev,Ib (186+) - three-operand signed multiply with a sign-extended
 * byte immediate: Gv = Ev * Ib.  SF/ZF/AF/PF are undefined afterwards.
 * For memory operands the 1-byte immediate size is passed to the
 * effective-address calculation (RIP-relative addressing). */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Ib)
{
    IEMOP_MNEMONIC("imul Gv,Ev,Ib"); /* Gv = Ev * Ib (sign-extended); */
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand; byte immediate sign-extended to 16 bits */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);

                IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand; 1 = size of the trailing imm8 */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_S8_SX_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand; byte immediate sign-extended to 32 bits */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);

                IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand; 1 = size of the trailing imm8 */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_S8_SX_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand; byte immediate sign-extended to 64 bits */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);

                IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand; 1 = size of the trailing imm8 */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S8_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
    }
    AssertFailedReturn(VERR_IEM_IPE_8);
}
9050
9051
/**
 * @opcode 0x6c - INS Yb,DX (byte input from I/O port DX to ES:[e/r]DI).
 *
 * Requires a 186+ CPU.  With a REP/REPNE prefix the repeated (string) C
 * implementation is used, otherwise the single-iteration one; both are
 * selected by the effective address size.  The trailing 'false' argument
 * is a flag forwarded to the C implementation (presumably "I/O permission
 * already checked" = false — confirm against iemCImpl_ins_*).
 */
FNIEMOP_DEF(iemOp_insb_Yb_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* Either REPNZ (F2) or REPZ (F3) selects the repeated variant here. */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep ins Yb,DX");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr16, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr32, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* default: asserts + returns */
        }
    }
    else
    {
        IEMOP_MNEMONIC("ins Yb,DX");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr16, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr32, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* default: asserts + returns */
        }
    }
}
9080
9081
/**
 * @opcode 0x6d - INS Yv,DX (word/dword input from I/O port DX).
 *
 * Requires a 186+ CPU.  Dispatches on operand size (16-bit vs 32-bit;
 * a 64-bit operand size is treated as 32-bit, matching hardware where
 * there is no 64-bit port I/O) and on effective address size, with
 * separate repeated variants when a REP/REPNE prefix is present.
 * Every switch arm returns, so the trailing breaks are unreachable.
 */
FNIEMOP_DEF(iemOp_inswd_Yv_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC("rep ins Yv,DX");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* 64-bit operand size uses the 32-bit I/O workers. */
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC("ins Yv,DX");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* 64-bit operand size uses the 32-bit I/O workers. */
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9142
9143
/**
 * @opcode 0x6e - OUTS DX,Yb (byte output from [seg]:[e/r]SI to I/O port DX).
 *
 * Requires a 186+ CPU.  Unlike INS, the source segment is overridable, so
 * the currently effective segment (pIemCpu->iEffSeg) is forwarded to the
 * C implementation.  REP/REPNE selects the repeated (string) variant.
 */
FNIEMOP_DEF(iemOp_outsb_Yb_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep outs DX,Yb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr16, pIemCpu->iEffSeg, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr32, pIemCpu->iEffSeg, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr64, pIemCpu->iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC("outs DX,Yb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr16, pIemCpu->iEffSeg, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr32, pIemCpu->iEffSeg, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr64, pIemCpu->iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9172
9173
/**
 * @opcode 0x6f - OUTS DX,Yv (word/dword output to I/O port DX).
 *
 * Requires a 186+ CPU.  Dispatches on operand size (64-bit operand size
 * falls back to the 32-bit workers, matching hardware where there is no
 * 64-bit port I/O) and on effective address size, with repeated variants
 * for REP/REPNE.  The effective source segment is passed through.
 * Every switch arm returns, so the trailing breaks are unreachable.
 */
FNIEMOP_DEF(iemOp_outswd_Yv_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC("rep outs DX,Yv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr16, pIemCpu->iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr32, pIemCpu->iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr64, pIemCpu->iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* 64-bit operand size uses the 32-bit I/O workers. */
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr16, pIemCpu->iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr32, pIemCpu->iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr64, pIemCpu->iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC("outs DX,Yv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr16, pIemCpu->iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr32, pIemCpu->iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr64, pIemCpu->iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* 64-bit operand size uses the 32-bit I/O workers. */
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr16, pIemCpu->iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr32, pIemCpu->iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr64, pIemCpu->iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9234
9235
/**
 * @opcode 0x70 - JO rel8: jump short if the overflow flag (OF) is set.
 */
FNIEMOP_DEF(iemOp_jo_Jb)
{
    IEMOP_MNEMONIC("jo Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* short Jcc defaults to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9253
9254
/**
 * @opcode 0x71 - JNO rel8: jump short if the overflow flag (OF) is clear.
 * (Implemented by inverting the branches of the OF=1 test.)
 */
FNIEMOP_DEF(iemOp_jno_Jb)
{
    IEMOP_MNEMONIC("jno Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* short Jcc defaults to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9272
/**
 * @opcode 0x72 - JC/JB/JNAE rel8: jump short if the carry flag (CF) is set.
 */
FNIEMOP_DEF(iemOp_jc_Jb)
{
    IEMOP_MNEMONIC("jc/jnae Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* short Jcc defaults to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9290
9291
/**
 * @opcode 0x73 - JNC/JNB/JAE rel8: jump short if the carry flag (CF) is clear.
 */
FNIEMOP_DEF(iemOp_jnc_Jb)
{
    IEMOP_MNEMONIC("jnc/jnb Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* short Jcc defaults to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9309
9310
/**
 * @opcode 0x74 - JE/JZ rel8: jump short if the zero flag (ZF) is set.
 */
FNIEMOP_DEF(iemOp_je_Jb)
{
    IEMOP_MNEMONIC("je/jz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* short Jcc defaults to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9328
9329
/**
 * @opcode 0x75 - JNE/JNZ rel8: jump short if the zero flag (ZF) is clear.
 */
FNIEMOP_DEF(iemOp_jne_Jb)
{
    IEMOP_MNEMONIC("jne/jnz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* short Jcc defaults to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9347
9348
/**
 * @opcode 0x76 - JBE/JNA rel8: jump short if CF or ZF is set (unsigned <=).
 */
FNIEMOP_DEF(iemOp_jbe_Jb)
{
    IEMOP_MNEMONIC("jbe/jna Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* short Jcc defaults to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9366
9367
/**
 * @opcode 0x77 - JNBE/JA rel8: jump short if both CF and ZF are clear (unsigned >).
 */
FNIEMOP_DEF(iemOp_jnbe_Jb)
{
    IEMOP_MNEMONIC("jnbe/ja Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* short Jcc defaults to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9385
9386
/**
 * @opcode 0x78 - JS rel8: jump short if the sign flag (SF) is set.
 */
FNIEMOP_DEF(iemOp_js_Jb)
{
    IEMOP_MNEMONIC("js Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* short Jcc defaults to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9404
9405
/**
 * @opcode 0x79 - JNS rel8: jump short if the sign flag (SF) is clear.
 */
FNIEMOP_DEF(iemOp_jns_Jb)
{
    IEMOP_MNEMONIC("jns Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* short Jcc defaults to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9423
9424
/**
 * @opcode 0x7a - JP/JPE rel8: jump short if the parity flag (PF) is set.
 */
FNIEMOP_DEF(iemOp_jp_Jb)
{
    IEMOP_MNEMONIC("jp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* short Jcc defaults to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9442
9443
/**
 * @opcode 0x7b - JNP/JPO rel8: jump short if the parity flag (PF) is clear.
 */
FNIEMOP_DEF(iemOp_jnp_Jb)
{
    IEMOP_MNEMONIC("jnp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* short Jcc defaults to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9461
9462
/**
 * @opcode 0x7c - JL/JNGE rel8: jump short if SF != OF (signed <).
 */
FNIEMOP_DEF(iemOp_jl_Jb)
{
    IEMOP_MNEMONIC("jl/jnge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* short Jcc defaults to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9480
9481
/**
 * @opcode 0x7d - JNL/JGE rel8: jump short if SF == OF (signed >=).
 */
FNIEMOP_DEF(iemOp_jnl_Jb)
{
    IEMOP_MNEMONIC("jnl/jge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* short Jcc defaults to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9499
9500
/**
 * @opcode 0x7e - JLE/JNG rel8: jump short if ZF is set or SF != OF (signed <=).
 */
FNIEMOP_DEF(iemOp_jle_Jb)
{
    IEMOP_MNEMONIC("jle/jng Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* short Jcc defaults to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9518
9519
/**
 * @opcode 0x7f - JNLE/JG rel8: jump short if ZF is clear and SF == OF (signed >).
 */
FNIEMOP_DEF(iemOp_jnle_Jb)
{
    IEMOP_MNEMONIC("jnle/jg Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* short Jcc defaults to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9537
9538
/**
 * @opcode 0x80 - Group 1: ADD/OR/ADC/SBB/AND/SUB/XOR/CMP Eb,Ib.
 *
 * The ModR/M reg field selects one of the eight binary operations via the
 * g_apIemImplGrp1 table.  The mnemonic string trick indexes a packed
 * "name\0" table with a fixed 4-byte stride (hence the extra NUL after
 * "or").  LOCK is only allowed for the memory forms of the read-modify-write
 * operations (CMP has no locked worker and rejects LOCK).
 */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_80)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC2("add\0or\0\0adc\0sbb\0and\0sub\0xor\0cmp" + ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)*4, "Eb,Ib");
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_NO_LOCK_PREFIX(); /* LOCK is invalid with a register destination */
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        uint32_t fAccess;
        if (pImpl->pfnLockedU8)
            fAccess = IEM_ACCESS_DATA_RW; /* read-modify-write op */
        else
        { /* CMP - only reads the destination, LOCK not allowed */
            IEMOP_HLP_NO_LOCK_PREFIX();
            fAccess = IEM_ACCESS_DATA_R;
        }
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        /* The '1' accounts for the imm8 byte still to be fetched (RIP-relative addressing). */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);

        IEM_MC_MEM_MAP(pu8Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
9597
9598
/**
 * @opcode 0x81 - Group 1: ADD/OR/ADC/SBB/AND/SUB/XOR/CMP Ev,Iz.
 *
 * The ModR/M reg field selects the operation from g_apIemImplGrp1; the
 * packed mnemonic table is indexed with a 4-byte stride.  The immediate
 * is operand-sized (imm16/imm32; sign-extended imm32 for 64-bit operand
 * size).  LOCK is only allowed for the memory forms of read-modify-write
 * operations (CMP rejects it).  32-bit register destinations have their
 * upper half cleared as per AMD64 semantics.
 */
FNIEMOP_DEF(iemOp_Grp1_Ev_Iz)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC2("add\0or\0\0adc\0sbb\0and\0sub\0xor\0cmp" + ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)*4, "Ev,Iz");
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register target */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u16Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                uint32_t fAccess;
                if (pImpl->pfnLockedU16)
                    fAccess = IEM_ACCESS_DATA_RW; /* read-modify-write op */
                else
                { /* CMP, TEST - read-only destination, LOCK not allowed */
                    IEMOP_HLP_NO_LOCK_PREFIX();
                    fAccess = IEM_ACCESS_DATA_R;
                }
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* The '2' accounts for the imm16 still to be fetched (RIP-relative addressing). */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_32BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register target */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u32Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /* 32-bit writes zero bits 63:32 */

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                uint32_t fAccess;
                if (pImpl->pfnLockedU32)
                    fAccess = IEM_ACCESS_DATA_RW; /* read-modify-write op */
                else
                { /* CMP, TEST - read-only destination, LOCK not allowed */
                    IEMOP_HLP_NO_LOCK_PREFIX();
                    fAccess = IEM_ACCESS_DATA_R;
                }
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* The '4' accounts for the imm32 still to be fetched (RIP-relative addressing). */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_64BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register target */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); /* imm32 sign-extended to 64 bits */
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u64Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                uint32_t fAccess;
                if (pImpl->pfnLockedU64)
                    fAccess = IEM_ACCESS_DATA_RW; /* read-modify-write op */
                else
                { /* CMP - read-only destination, LOCK not allowed */
                    IEMOP_HLP_NO_LOCK_PREFIX();
                    fAccess = IEM_ACCESS_DATA_R;
                }
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* The '4' accounts for the imm32 still to be fetched (RIP-relative addressing). */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); /* imm32 sign-extended to 64 bits */
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            break;
        }
    }
    return VINF_SUCCESS;
}
9773
9774
/**
 * @opcode 0x82 - Group 1 Eb,Ib alias (invalid in 64-bit mode).
 *
 * Identical to opcode 0x80 outside long mode; here it raises \#UD in
 * 64-bit mode and otherwise forwards to the 0x80 decoder.
 */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_82)
{
    IEMOP_HLP_NO_64BIT(); /** @todo do we need to decode the whole instruction or is this ok? */
    return FNIEMOP_CALL(iemOp_Grp1_Eb_Ib_80);
}
9781
9782
/**
 * @opcode 0x83 - Group 1: ADD/OR/ADC/SBB/AND/SUB/XOR/CMP Ev,Ib.
 *
 * Like opcode 0x81 but the immediate is a sign-extended imm8 (note the
 * (int8_t) casts widening it to the effective operand size).  The ModR/M
 * reg field selects the operation from g_apIemImplGrp1.  LOCK is only
 * allowed for the memory forms of read-modify-write operations.
 */
FNIEMOP_DEF(iemOp_Grp1_Ev_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC2("add\0or\0\0adc\0sbb\0and\0sub\0xor\0cmp" + ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)*4, "Ev,Ib");
    /* Note! Seems the OR, AND, and XOR instructions are present on CPUs prior
       to the 386 even if absent in the intel reference manuals and some
       3rd party opcode listings. */
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register target
         */
        IEMOP_HLP_NO_LOCK_PREFIX();
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (int8_t)u8Imm,1); /* sign-extend imm8 */
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (int8_t)u8Imm,1); /* sign-extend imm8 */
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /* 32-bit writes zero bits 63:32 */

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int8_t)u8Imm,1); /* sign-extend imm8 */
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }
        }
    }
    else
    {
        /*
         * Memory target.
         */
        uint32_t fAccess;
        if (pImpl->pfnLockedU16)
            fAccess = IEM_ACCESS_DATA_RW; /* read-modify-write op */
        else
        { /* CMP - read-only destination, LOCK not allowed */
            IEMOP_HLP_NO_LOCK_PREFIX();
            fAccess = IEM_ACCESS_DATA_R;
        }

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* The '1' accounts for the imm8 still to be fetched (RIP-relative addressing). */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm); /* sign-extend imm8 */
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* The '1' accounts for the imm8 still to be fetched (RIP-relative addressing). */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm); /* sign-extend imm8 */
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* The '1' accounts for the imm8 still to be fetched (RIP-relative addressing). */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm); /* sign-extend imm8 */
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }
        }
    }
    return VINF_SUCCESS;
}
9946
9947
/**
 * @opcode 0x84 - TEST Eb,Gb.
 *
 * AND without writing the destination; reuses the generic byte
 * rm,r8 binary-operator worker with the TEST implementation table.
 * AF is left undefined by TEST, hence the verification hint.
 */
FNIEMOP_DEF(iemOp_test_Eb_Gb)
{
    IEMOP_MNEMONIC("test Eb,Gb");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo do we have to decode the whole instruction first? */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_test);
}
9956
9957
/**
 * @opcode 0x85 - TEST Ev,Gv.
 *
 * AND without writing the destination; reuses the generic operand-sized
 * rm,rv binary-operator worker with the TEST implementation table.
 * AF is left undefined by TEST, hence the verification hint.
 */
FNIEMOP_DEF(iemOp_test_Ev_Gv)
{
    IEMOP_MNEMONIC("test Ev,Gv");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo do we have to decode the whole instruction first? */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_test);
}
9966
9967
/**
 * @opcode 0x86 - XCHG Eb,Gb.
 *
 * Register-register form swaps the two bytes via temporaries; the memory
 * form maps the destination byte read-write and calls the assembly xchg
 * worker (memory xchg is implicitly locked on real hardware; LOCK prefix
 * handling is not rejected here).
 */
FNIEMOP_DEF(iemOp_xchg_Eb_Gb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC("xchg Eb,Gb");

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, uTmp1);
        IEM_MC_LOCAL(uint8_t, uTmp2);

        /* Classic two-temporary swap of reg and r/m. */
        IEM_MC_FETCH_GREG_U8(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_FETCH_GREG_U8(uTmp2, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, uTmp1);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uTmp2);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
/** @todo the register must be committed separately! */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint8_t *, pu8Mem, 0);
        IEM_MC_ARG(uint8_t *, pu8Reg, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Mem, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8, pu8Mem, pu8Reg);
        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Mem, IEM_ACCESS_DATA_RW);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
10015
10016
10017/** Opcode 0x87. */
10018FNIEMOP_DEF(iemOp_xchg_Ev_Gv)
10019{
10020 IEMOP_MNEMONIC("xchg Ev,Gv");
10021 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10022
10023 /*
10024 * If rm is denoting a register, no more instruction bytes.
10025 */
10026 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10027 {
10028 IEMOP_HLP_NO_LOCK_PREFIX();
10029
10030 switch (pIemCpu->enmEffOpSize)
10031 {
10032 case IEMMODE_16BIT:
10033 IEM_MC_BEGIN(0, 2);
10034 IEM_MC_LOCAL(uint16_t, uTmp1);
10035 IEM_MC_LOCAL(uint16_t, uTmp2);
10036
10037 IEM_MC_FETCH_GREG_U16(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
10038 IEM_MC_FETCH_GREG_U16(uTmp2, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
10039 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, uTmp1);
10040 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uTmp2);
10041
10042 IEM_MC_ADVANCE_RIP();
10043 IEM_MC_END();
10044 return VINF_SUCCESS;
10045
10046 case IEMMODE_32BIT:
10047 IEM_MC_BEGIN(0, 2);
10048 IEM_MC_LOCAL(uint32_t, uTmp1);
10049 IEM_MC_LOCAL(uint32_t, uTmp2);
10050
10051 IEM_MC_FETCH_GREG_U32(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
10052 IEM_MC_FETCH_GREG_U32(uTmp2, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
10053 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, uTmp1);
10054 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uTmp2);
10055
10056 IEM_MC_ADVANCE_RIP();
10057 IEM_MC_END();
10058 return VINF_SUCCESS;
10059
10060 case IEMMODE_64BIT:
10061 IEM_MC_BEGIN(0, 2);
10062 IEM_MC_LOCAL(uint64_t, uTmp1);
10063 IEM_MC_LOCAL(uint64_t, uTmp2);
10064
10065 IEM_MC_FETCH_GREG_U64(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
10066 IEM_MC_FETCH_GREG_U64(uTmp2, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
10067 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, uTmp1);
10068 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uTmp2);
10069
10070 IEM_MC_ADVANCE_RIP();
10071 IEM_MC_END();
10072 return VINF_SUCCESS;
10073
10074 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10075 }
10076 }
10077 else
10078 {
10079 /*
10080 * We're accessing memory.
10081 */
10082 switch (pIemCpu->enmEffOpSize)
10083 {
10084/** @todo the register must be committed separately! */
10085 case IEMMODE_16BIT:
10086 IEM_MC_BEGIN(2, 2);
10087 IEM_MC_ARG(uint16_t *, pu16Mem, 0);
10088 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
10089 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10090
10091 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10092 IEM_MC_MEM_MAP(pu16Mem, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
10093 IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
10094 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16, pu16Mem, pu16Reg);
10095 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Mem, IEM_ACCESS_DATA_RW);
10096
10097 IEM_MC_ADVANCE_RIP();
10098 IEM_MC_END();
10099 return VINF_SUCCESS;
10100
10101 case IEMMODE_32BIT:
10102 IEM_MC_BEGIN(2, 2);
10103 IEM_MC_ARG(uint32_t *, pu32Mem, 0);
10104 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
10105 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10106
10107 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10108 IEM_MC_MEM_MAP(pu32Mem, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
10109 IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
10110 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32, pu32Mem, pu32Reg);
10111 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Mem, IEM_ACCESS_DATA_RW);
10112
10113 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
10114 IEM_MC_ADVANCE_RIP();
10115 IEM_MC_END();
10116 return VINF_SUCCESS;
10117
10118 case IEMMODE_64BIT:
10119 IEM_MC_BEGIN(2, 2);
10120 IEM_MC_ARG(uint64_t *, pu64Mem, 0);
10121 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
10122 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10123
10124 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10125 IEM_MC_MEM_MAP(pu64Mem, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
10126 IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
10127 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64, pu64Mem, pu64Reg);
10128 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Mem, IEM_ACCESS_DATA_RW);
10129
10130 IEM_MC_ADVANCE_RIP();
10131 IEM_MC_END();
10132 return VINF_SUCCESS;
10133
10134 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10135 }
10136 }
10137}
10138
10139
10140/** Opcode 0x88. */
10141FNIEMOP_DEF(iemOp_mov_Eb_Gb)
10142{
10143 IEMOP_MNEMONIC("mov Eb,Gb");
10144
10145 uint8_t bRm;
10146 IEM_OPCODE_GET_NEXT_U8(&bRm);
10147 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
10148
10149 /*
10150 * If rm is denoting a register, no more instruction bytes.
10151 */
10152 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10153 {
10154 IEM_MC_BEGIN(0, 1);
10155 IEM_MC_LOCAL(uint8_t, u8Value);
10156 IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
10157 IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u8Value);
10158 IEM_MC_ADVANCE_RIP();
10159 IEM_MC_END();
10160 }
10161 else
10162 {
10163 /*
10164 * We're writing a register to memory.
10165 */
10166 IEM_MC_BEGIN(0, 2);
10167 IEM_MC_LOCAL(uint8_t, u8Value);
10168 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10169 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10170 IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
10171 IEM_MC_STORE_MEM_U8(pIemCpu->iEffSeg, GCPtrEffDst, u8Value);
10172 IEM_MC_ADVANCE_RIP();
10173 IEM_MC_END();
10174 }
10175 return VINF_SUCCESS;
10176
10177}
10178
10179
10180/** Opcode 0x89. */
10181FNIEMOP_DEF(iemOp_mov_Ev_Gv)
10182{
10183 IEMOP_MNEMONIC("mov Ev,Gv");
10184
10185 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10186 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
10187
10188 /*
10189 * If rm is denoting a register, no more instruction bytes.
10190 */
10191 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10192 {
10193 switch (pIemCpu->enmEffOpSize)
10194 {
10195 case IEMMODE_16BIT:
10196 IEM_MC_BEGIN(0, 1);
10197 IEM_MC_LOCAL(uint16_t, u16Value);
10198 IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
10199 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Value);
10200 IEM_MC_ADVANCE_RIP();
10201 IEM_MC_END();
10202 break;
10203
10204 case IEMMODE_32BIT:
10205 IEM_MC_BEGIN(0, 1);
10206 IEM_MC_LOCAL(uint32_t, u32Value);
10207 IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
10208 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Value);
10209 IEM_MC_ADVANCE_RIP();
10210 IEM_MC_END();
10211 break;
10212
10213 case IEMMODE_64BIT:
10214 IEM_MC_BEGIN(0, 1);
10215 IEM_MC_LOCAL(uint64_t, u64Value);
10216 IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
10217 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Value);
10218 IEM_MC_ADVANCE_RIP();
10219 IEM_MC_END();
10220 break;
10221 }
10222 }
10223 else
10224 {
10225 /*
10226 * We're writing a register to memory.
10227 */
10228 switch (pIemCpu->enmEffOpSize)
10229 {
10230 case IEMMODE_16BIT:
10231 IEM_MC_BEGIN(0, 2);
10232 IEM_MC_LOCAL(uint16_t, u16Value);
10233 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10234 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10235 IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
10236 IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Value);
10237 IEM_MC_ADVANCE_RIP();
10238 IEM_MC_END();
10239 break;
10240
10241 case IEMMODE_32BIT:
10242 IEM_MC_BEGIN(0, 2);
10243 IEM_MC_LOCAL(uint32_t, u32Value);
10244 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10245 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10246 IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
10247 IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrEffDst, u32Value);
10248 IEM_MC_ADVANCE_RIP();
10249 IEM_MC_END();
10250 break;
10251
10252 case IEMMODE_64BIT:
10253 IEM_MC_BEGIN(0, 2);
10254 IEM_MC_LOCAL(uint64_t, u64Value);
10255 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10256 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10257 IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
10258 IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffDst, u64Value);
10259 IEM_MC_ADVANCE_RIP();
10260 IEM_MC_END();
10261 break;
10262 }
10263 }
10264 return VINF_SUCCESS;
10265}
10266
10267
10268/** Opcode 0x8a. */
10269FNIEMOP_DEF(iemOp_mov_Gb_Eb)
10270{
10271 IEMOP_MNEMONIC("mov Gb,Eb");
10272
10273 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10274 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
10275
10276 /*
10277 * If rm is denoting a register, no more instruction bytes.
10278 */
10279 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10280 {
10281 IEM_MC_BEGIN(0, 1);
10282 IEM_MC_LOCAL(uint8_t, u8Value);
10283 IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
10284 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u8Value);
10285 IEM_MC_ADVANCE_RIP();
10286 IEM_MC_END();
10287 }
10288 else
10289 {
10290 /*
10291 * We're loading a register from memory.
10292 */
10293 IEM_MC_BEGIN(0, 2);
10294 IEM_MC_LOCAL(uint8_t, u8Value);
10295 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10296 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10297 IEM_MC_FETCH_MEM_U8(u8Value, pIemCpu->iEffSeg, GCPtrEffDst);
10298 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u8Value);
10299 IEM_MC_ADVANCE_RIP();
10300 IEM_MC_END();
10301 }
10302 return VINF_SUCCESS;
10303}
10304
10305
10306/** Opcode 0x8b. */
10307FNIEMOP_DEF(iemOp_mov_Gv_Ev)
10308{
10309 IEMOP_MNEMONIC("mov Gv,Ev");
10310
10311 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10312 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
10313
10314 /*
10315 * If rm is denoting a register, no more instruction bytes.
10316 */
10317 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10318 {
10319 switch (pIemCpu->enmEffOpSize)
10320 {
10321 case IEMMODE_16BIT:
10322 IEM_MC_BEGIN(0, 1);
10323 IEM_MC_LOCAL(uint16_t, u16Value);
10324 IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
10325 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
10326 IEM_MC_ADVANCE_RIP();
10327 IEM_MC_END();
10328 break;
10329
10330 case IEMMODE_32BIT:
10331 IEM_MC_BEGIN(0, 1);
10332 IEM_MC_LOCAL(uint32_t, u32Value);
10333 IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
10334 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
10335 IEM_MC_ADVANCE_RIP();
10336 IEM_MC_END();
10337 break;
10338
10339 case IEMMODE_64BIT:
10340 IEM_MC_BEGIN(0, 1);
10341 IEM_MC_LOCAL(uint64_t, u64Value);
10342 IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
10343 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
10344 IEM_MC_ADVANCE_RIP();
10345 IEM_MC_END();
10346 break;
10347 }
10348 }
10349 else
10350 {
10351 /*
10352 * We're loading a register from memory.
10353 */
10354 switch (pIemCpu->enmEffOpSize)
10355 {
10356 case IEMMODE_16BIT:
10357 IEM_MC_BEGIN(0, 2);
10358 IEM_MC_LOCAL(uint16_t, u16Value);
10359 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10360 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10361 IEM_MC_FETCH_MEM_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
10362 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
10363 IEM_MC_ADVANCE_RIP();
10364 IEM_MC_END();
10365 break;
10366
10367 case IEMMODE_32BIT:
10368 IEM_MC_BEGIN(0, 2);
10369 IEM_MC_LOCAL(uint32_t, u32Value);
10370 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10371 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10372 IEM_MC_FETCH_MEM_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
10373 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
10374 IEM_MC_ADVANCE_RIP();
10375 IEM_MC_END();
10376 break;
10377
10378 case IEMMODE_64BIT:
10379 IEM_MC_BEGIN(0, 2);
10380 IEM_MC_LOCAL(uint64_t, u64Value);
10381 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10382 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10383 IEM_MC_FETCH_MEM_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
10384 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
10385 IEM_MC_ADVANCE_RIP();
10386 IEM_MC_END();
10387 break;
10388 }
10389 }
10390 return VINF_SUCCESS;
10391}
10392
10393
10394/** Opcode 0x63. */
10395FNIEMOP_DEF(iemOp_arpl_Ew_Gw_movsx_Gv_Ev)
10396{
10397 if (pIemCpu->enmCpuMode != IEMMODE_64BIT)
10398 return FNIEMOP_CALL(iemOp_arpl_Ew_Gw);
10399 if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
10400 return FNIEMOP_CALL(iemOp_mov_Gv_Ev);
10401 return FNIEMOP_CALL(iemOp_movsxd_Gv_Ev);
10402}
10403
10404
/** Opcode 0x8c - mov Ev,Sw (store segment register). */
FNIEMOP_DEF(iemOp_mov_Ev_Sw)
{
    IEMOP_MNEMONIC("mov Ev,Sw");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * Check that the destination register exists. The REX.R prefix is ignored.
     */
    /* 'reg' selects the segment register; anything above GS is undefined -> #UD. */
    uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
    if (   iSegReg > X86_SREG_GS)
        return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     * In that case, the operand size is respected and the upper bits are
     * cleared (starting with some pentium).
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                /* Zero extend the 16-bit selector to the full register width. */
                IEM_MC_FETCH_SREG_ZX_U32(u32Value, iSegReg);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_SREG_ZX_U64(u64Value, iSegReg);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're saving the register to memory.  The access is word sized
         * regardless of operand size prefixes.
         */
#if 0 /* not necessary */
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_16BIT;
#endif
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
        IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
10477
10478
10479
10480
/** Opcode 0x8d - lea Gv,M (load effective address; no memory is accessed). */
FNIEMOP_DEF(iemOp_lea_Gv_M)
{
    IEMOP_MNEMONIC("lea Gv,M");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE(); /* no register form */

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint16_t, u16Cast);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            /* The calculated address is truncated to the operand size. */
            IEM_MC_ASSIGN_TO_SMALLER(u16Cast, GCPtrEffSrc);
            IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Cast);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Cast);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEM_MC_ASSIGN_TO_SMALLER(u32Cast, GCPtrEffSrc);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Cast);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            /* 64-bit: store the effective address as-is, no truncation needed. */
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, GCPtrEffSrc);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
    }
    AssertFailedReturn(VERR_IEM_IPE_7);
}
10525
10526
/** Opcode 0x8e - mov Sw,Ev (load segment register). */
FNIEMOP_DEF(iemOp_mov_Sw_Ev)
{
    IEMOP_MNEMONIC("mov Sw,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * The practical operand size is 16-bit.
     */
#if 0 /* not necessary */
    pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_16BIT;
#endif

    /*
     * Check that the destination register exists and can be used with this
     * instruction.  The REX.R prefix is ignored.
     */
    /* CS cannot be loaded with mov, and anything above GS is undefined -> #UD. */
    uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
    if (   iSegReg == X86_SREG_CS
        || iSegReg > X86_SREG_GS)
        return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* The C implementation performs descriptor loading and raises any faults. */
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
        IEM_MC_ARG(uint16_t, u16Value, 1);
        IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading the register from memory.  The access is word sized
         * regardless of operand size prefixes.
         */
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
        IEM_MC_ARG(uint16_t, u16Value, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_MEM_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
10580
10581
/** Opcode 0x8f /0 - pop Ev. */
FNIEMOP_DEF_1(iemOp_pop_Ev, uint8_t, bRm)
{
    /* This bugger is rather annoying as it requires rSP to be updated before
       doing the effective address calculations. Will eventually require a
       split between the R/M+SIB decoding and the effective address
       calculation - which is something that is required for any attempt at
       reusing this code for a recompiler. It may also be good to have if we
       need to delay #UD exception caused by invalid lock prefixes.

       For now, we'll do a mostly safe interpreter-only implementation here. */
    /** @todo What's the deal with the 'reg' field and pop Ev? Ignorning it for
     * now until tests show it's checked.. */
    IEMOP_MNEMONIC("pop Ev");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /* Register access is relatively easy and can share code. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(iemOpCommonPopGReg, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);

    /*
     * Memory target.
     *
     * Intel says that RSP is incremented before it's used in any effective
     * address calcuations.  This means some serious extra annoyance here since
     * we decode and calculate the effective address in one step and like to
     * delay committing registers till everything is done.
     *
     * So, we'll decode and calculate the effective address twice.  This will
     * require some recoding if turned into a recompiler.
     */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* The common code does this differently. */

#ifndef TST_IEM_CHECK_MC
    /* Calc effective address with modified ESP. */
    /* First pass: decode R/M+SIB and compute the address; the opcode read
       position is saved so the second pass re-reads the same bytes. */
    uint8_t const offOpcodeSaved = pIemCpu->offOpcode;
    RTGCPTR GCPtrEff;
    VBOXSTRICTRC rcStrict;
    rcStrict = iemOpHlpCalcRmEffAddr(pIemCpu, bRm, 0, &GCPtrEff);
    if (rcStrict != VINF_SUCCESS)
        return rcStrict;
    pIemCpu->offOpcode = offOpcodeSaved;

    /* Temporarily advance rSP by the pop size and recalculate, then restore
       rSP so nothing is committed yet. */
    PCPUMCTX pCtx = pIemCpu->CTX_SUFF(pCtx);
    uint64_t const RspSaved = pCtx->rsp;
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT: iemRegAddToRsp(pIemCpu, pCtx, 2); break;
        case IEMMODE_32BIT: iemRegAddToRsp(pIemCpu, pCtx, 4); break;
        case IEMMODE_64BIT: iemRegAddToRsp(pIemCpu, pCtx, 8); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    rcStrict = iemOpHlpCalcRmEffAddr(pIemCpu, bRm, 0, &GCPtrEff);
    Assert(rcStrict == VINF_SUCCESS); /* second pass cannot fail differently */
    pCtx->rsp = RspSaved;

    /* Perform the operation - this should be CImpl. */
    /* Pop via a temporary RSP; only committed below on full success. */
    RTUINT64U TmpRsp;
    TmpRsp.u = pCtx->rsp;
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Value;
            rcStrict = iemMemStackPopU16Ex(pIemCpu, &u16Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU16(pIemCpu, pIemCpu->iEffSeg, GCPtrEff, u16Value);
            break;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Value;
            rcStrict = iemMemStackPopU32Ex(pIemCpu, &u32Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU32(pIemCpu, pIemCpu->iEffSeg, GCPtrEff, u32Value);
            break;
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Value;
            rcStrict = iemMemStackPopU64Ex(pIemCpu, &u64Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU64(pIemCpu, pIemCpu->iEffSeg, GCPtrEff, u64Value);
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict == VINF_SUCCESS)
    {
        /* Commit the new stack pointer and advance RIP only on success. */
        pCtx->rsp = TmpRsp.u;
        iemRegUpdateRipAndClearRF(pIemCpu);
    }
    return rcStrict;

#else
    return VERR_IEM_IPE_2;
#endif
}
10683
10684
10685/** Opcode 0x8f. */
10686FNIEMOP_DEF(iemOp_Grp1A)
10687{
10688 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10689 if ((bRm & X86_MODRM_REG_MASK) == (0 << X86_MODRM_REG_SHIFT)) /* /0 */
10690 return FNIEMOP_CALL_1(iemOp_pop_Ev, bRm);
10691
10692 /* AMD has defined /1 thru /7 as XOP prefix (similar to three byte VEX). */
10693 /** @todo XOP decoding. */
10694 IEMOP_MNEMONIC("3-byte-xop");
10695 return IEMOP_RAISE_INVALID_OPCODE();
10696}
10697
10698
10699/**
10700 * Common 'xchg reg,rAX' helper.
10701 */
10702FNIEMOP_DEF_1(iemOpCommonXchgGRegRax, uint8_t, iReg)
10703{
10704 IEMOP_HLP_NO_LOCK_PREFIX();
10705
10706 iReg |= pIemCpu->uRexB;
10707 switch (pIemCpu->enmEffOpSize)
10708 {
10709 case IEMMODE_16BIT:
10710 IEM_MC_BEGIN(0, 2);
10711 IEM_MC_LOCAL(uint16_t, u16Tmp1);
10712 IEM_MC_LOCAL(uint16_t, u16Tmp2);
10713 IEM_MC_FETCH_GREG_U16(u16Tmp1, iReg);
10714 IEM_MC_FETCH_GREG_U16(u16Tmp2, X86_GREG_xAX);
10715 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp1);
10716 IEM_MC_STORE_GREG_U16(iReg, u16Tmp2);
10717 IEM_MC_ADVANCE_RIP();
10718 IEM_MC_END();
10719 return VINF_SUCCESS;
10720
10721 case IEMMODE_32BIT:
10722 IEM_MC_BEGIN(0, 2);
10723 IEM_MC_LOCAL(uint32_t, u32Tmp1);
10724 IEM_MC_LOCAL(uint32_t, u32Tmp2);
10725 IEM_MC_FETCH_GREG_U32(u32Tmp1, iReg);
10726 IEM_MC_FETCH_GREG_U32(u32Tmp2, X86_GREG_xAX);
10727 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp1);
10728 IEM_MC_STORE_GREG_U32(iReg, u32Tmp2);
10729 IEM_MC_ADVANCE_RIP();
10730 IEM_MC_END();
10731 return VINF_SUCCESS;
10732
10733 case IEMMODE_64BIT:
10734 IEM_MC_BEGIN(0, 2);
10735 IEM_MC_LOCAL(uint64_t, u64Tmp1);
10736 IEM_MC_LOCAL(uint64_t, u64Tmp2);
10737 IEM_MC_FETCH_GREG_U64(u64Tmp1, iReg);
10738 IEM_MC_FETCH_GREG_U64(u64Tmp2, X86_GREG_xAX);
10739 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp1);
10740 IEM_MC_STORE_GREG_U64(iReg, u64Tmp2);
10741 IEM_MC_ADVANCE_RIP();
10742 IEM_MC_END();
10743 return VINF_SUCCESS;
10744
10745 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10746 }
10747}
10748
10749
10750/** Opcode 0x90. */
10751FNIEMOP_DEF(iemOp_nop)
10752{
10753 /* R8/R8D and RAX/EAX can be exchanged. */
10754 if (pIemCpu->fPrefixes & IEM_OP_PRF_REX_B)
10755 {
10756 IEMOP_MNEMONIC("xchg r8,rAX");
10757 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xAX);
10758 }
10759
10760 if (pIemCpu->fPrefixes & IEM_OP_PRF_LOCK)
10761 IEMOP_MNEMONIC("pause");
10762 else
10763 IEMOP_MNEMONIC("nop");
10764 IEM_MC_BEGIN(0, 0);
10765 IEM_MC_ADVANCE_RIP();
10766 IEM_MC_END();
10767 return VINF_SUCCESS;
10768}
10769
10770
/** Opcode 0x91 - xchg rCX,rAX (rCX becomes r9 with REX.B, handled in the common helper). */
FNIEMOP_DEF(iemOp_xchg_eCX_eAX)
{
    IEMOP_MNEMONIC("xchg rCX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xCX);
}
10777
10778
/** Opcode 0x92 - xchg rDX,rAX (rDX becomes r10 with REX.B, handled in the common helper). */
FNIEMOP_DEF(iemOp_xchg_eDX_eAX)
{
    IEMOP_MNEMONIC("xchg rDX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDX);
}
10785
10786
/** Opcode 0x93 - xchg rBX,rAX (rBX becomes r11 with REX.B, handled in the common helper). */
FNIEMOP_DEF(iemOp_xchg_eBX_eAX)
{
    IEMOP_MNEMONIC("xchg rBX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBX);
}
10793
10794
10795/** Opcode 0x94. */
10796FNIEMOP_DEF(iemOp_xchg_eSP_eAX)
10797{
10798 IEMOP_MNEMONIC("xchg rSX,rAX");
10799 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSP);
10800}
10801
10802
/** Opcode 0x95 - xchg rBP,rAX (rBP becomes r13 with REX.B, handled in the common helper). */
FNIEMOP_DEF(iemOp_xchg_eBP_eAX)
{
    IEMOP_MNEMONIC("xchg rBP,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBP);
}
10809
10810
/** Opcode 0x96 - xchg rSI,rAX (rSI becomes r14 with REX.B, handled in the common helper). */
FNIEMOP_DEF(iemOp_xchg_eSI_eAX)
{
    IEMOP_MNEMONIC("xchg rSI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSI);
}
10817
10818
/** Opcode 0x97 - xchg rDI,rAX (rDI becomes r15 with REX.B, handled in the common helper). */
FNIEMOP_DEF(iemOp_xchg_eDI_eAX)
{
    IEMOP_MNEMONIC("xchg rDI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDI);
}
10825
10826
/** Opcode 0x98 - cbw/cwde/cdqe, selected by the effective operand size. */
FNIEMOP_DEF(iemOp_cbw)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEMOP_MNEMONIC("cbw");
            IEM_MC_BEGIN(0, 1);
            /* Sign extend AL into AX: replicate bit 7 into AH via OR/AND masks. */
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 7) {
                IEM_MC_OR_GREG_U16(X86_GREG_xAX, UINT16_C(0xff00));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U16(X86_GREG_xAX, UINT16_C(0x00ff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEMOP_MNEMONIC("cwde");
            IEM_MC_BEGIN(0, 1);
            /* Sign extend AX into EAX using bit 15. */
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
                IEM_MC_OR_GREG_U32(X86_GREG_xAX, UINT32_C(0xffff0000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U32(X86_GREG_xAX, UINT32_C(0x0000ffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEMOP_MNEMONIC("cdqe");
            IEM_MC_BEGIN(0, 1);
            /* Sign extend EAX into RAX using bit 31. */
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
                IEM_MC_OR_GREG_U64(X86_GREG_xAX, UINT64_C(0xffffffff00000000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U64(X86_GREG_xAX, UINT64_C(0x00000000ffffffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10872
10873
/** Opcode 0x99 - cwd/cdq/cqo, selected by the effective operand size. */
FNIEMOP_DEF(iemOp_cwd)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEMOP_MNEMONIC("cwd");
            IEM_MC_BEGIN(0, 1);
            /* Fill DX with the sign of AX (bit 15): all ones or all zeros. */
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, UINT16_C(0xffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEMOP_MNEMONIC("cdq");
            IEM_MC_BEGIN(0, 1);
            /* Fill EDX with the sign of EAX (bit 31). */
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, UINT32_C(0xffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEMOP_MNEMONIC("cqo");
            IEM_MC_BEGIN(0, 1);
            /* Fill RDX with the sign of RAX (bit 63). */
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 63) {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, UINT64_C(0xffffffffffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10919
10920
/** Opcode 0x9a - call Ap (far call with immediate selector:offset pointer). */
FNIEMOP_DEF(iemOp_call_Ap)
{
    IEMOP_MNEMONIC("call Ap");
    IEMOP_HLP_NO_64BIT(); /* this encoding is rejected in 64-bit mode */

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    /* The offset comes first, sized by the operand size (16 or 32 bits). */
    uint32_t offSeg;
    if (pIemCpu->enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&offSeg);
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
    uint16_t uSel;  IEM_OPCODE_GET_NEXT_U16(&uSel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_callf, uSel, offSeg, pIemCpu->enmEffOpSize);
}
10937
10938
/** Opcode 0x9b. (aka fwait) */
FNIEMOP_DEF(iemOp_wait)
{
    IEMOP_MNEMONIC("wait");
    IEMOP_HLP_NO_LOCK_PREFIX();

    /* WAIT/FWAIT itself does no work: it only checks for device-not-available
       and pending FPU exceptions before advancing RIP. */
    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10952
10953
10954/** Opcode 0x9c. */
10955FNIEMOP_DEF(iemOp_pushf_Fv)
10956{
10957 IEMOP_HLP_NO_LOCK_PREFIX();
10958 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10959 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_pushf, pIemCpu->enmEffOpSize);
10960}
10961
10962
10963/** Opcode 0x9d. */
10964FNIEMOP_DEF(iemOp_popf_Fv)
10965{
10966 IEMOP_HLP_NO_LOCK_PREFIX();
10967 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10968 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_popf, pIemCpu->enmEffOpSize);
10969}
10970
10971
/** Opcode 0x9e - sahf (store AH into the low EFLAGS byte). */
FNIEMOP_DEF(iemOp_sahf)
{
    IEMOP_MNEMONIC("sahf");
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* In 64-bit mode SAHF is only valid when the guest CPU reports
       LAHF/SAHF support. */
    if (   pIemCpu->enmCpuMode == IEMMODE_64BIT
        && !IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fLahfSahf)
        return IEMOP_RAISE_INVALID_OPCODE();
    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint32_t, u32Flags);
    IEM_MC_LOCAL(uint32_t, EFlags);
    IEM_MC_FETCH_EFLAGS(EFlags);
    IEM_MC_FETCH_GREG_U8_ZX_U32(u32Flags, X86_GREG_xSP/*=AH*/);
    /* Only SF, ZF, AF, PF and CF are taken from AH; reserved bit 1 is forced to 1. */
    IEM_MC_AND_LOCAL_U32(u32Flags, X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    IEM_MC_AND_LOCAL_U32(EFlags, UINT32_C(0xffffff00)); /* drop the current low EFLAGS byte */
    IEM_MC_OR_LOCAL_U32(u32Flags, X86_EFL_1);
    IEM_MC_OR_2LOCS_U32(EFlags, u32Flags);
    IEM_MC_COMMIT_EFLAGS(EFlags);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10994
10995
/** Opcode 0x9f - lahf (load the low EFLAGS byte into AH). */
FNIEMOP_DEF(iemOp_lahf)
{
    IEMOP_MNEMONIC("lahf");
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* In 64-bit mode LAHF is only valid when the guest CPU reports
       LAHF/SAHF support. */
    if (   pIemCpu->enmCpuMode == IEMMODE_64BIT
        && !IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fLahfSahf)
        return IEMOP_RAISE_INVALID_OPCODE();
    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(uint8_t, u8Flags);
    IEM_MC_FETCH_EFLAGS_U8(u8Flags);
    IEM_MC_STORE_GREG_U8(X86_GREG_xSP/*=AH*/, u8Flags);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
11012
11013
11014/**
11015 * Macro used by iemOp_mov_Al_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL and
11016 * iemOp_mov_Ov_rAX to fetch the moffsXX bit of the opcode and fend of lock
11017 * prefixes. Will return on failures.
11018 * @param a_GCPtrMemOff The variable to store the offset in.
11019 */
11020#define IEMOP_FETCH_MOFFS_XX(a_GCPtrMemOff) \
11021 do \
11022 { \
11023 switch (pIemCpu->enmEffAddrMode) \
11024 { \
11025 case IEMMODE_16BIT: \
11026 IEM_OPCODE_GET_NEXT_U16_ZX_U64(&(a_GCPtrMemOff)); \
11027 break; \
11028 case IEMMODE_32BIT: \
11029 IEM_OPCODE_GET_NEXT_U32_ZX_U64(&(a_GCPtrMemOff)); \
11030 break; \
11031 case IEMMODE_64BIT: \
11032 IEM_OPCODE_GET_NEXT_U64(&(a_GCPtrMemOff)); \
11033 break; \
11034 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
11035 } \
11036 IEMOP_HLP_NO_LOCK_PREFIX(); \
11037 } while (0)
11038
11039/** Opcode 0xa0. */
11040FNIEMOP_DEF(iemOp_mov_Al_Ob)
11041{
11042 /*
11043 * Get the offset and fend of lock prefixes.
11044 */
11045 RTGCPTR GCPtrMemOff;
11046 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
11047
11048 /*
11049 * Fetch AL.
11050 */
11051 IEM_MC_BEGIN(0,1);
11052 IEM_MC_LOCAL(uint8_t, u8Tmp);
11053 IEM_MC_FETCH_MEM_U8(u8Tmp, pIemCpu->iEffSeg, GCPtrMemOff);
11054 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
11055 IEM_MC_ADVANCE_RIP();
11056 IEM_MC_END();
11057 return VINF_SUCCESS;
11058}
11059
11060
/** Opcode 0xa1 - mov rAX,Ov (load rAX from a direct memory offset). */
FNIEMOP_DEF(iemOp_mov_rAX_Ov)
{
    /*
     * Get the offset and fend of lock prefixes.
     */
    IEMOP_MNEMONIC("mov rAX,Ov");
    RTGCPTR GCPtrMemOff;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);

    /*
     * Fetch rAX.
     */
    /* The load width follows the effective operand size. */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint16_t, u16Tmp);
            IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11106
11107
11108/** Opcode 0xa2. */
11109FNIEMOP_DEF(iemOp_mov_Ob_AL)
11110{
11111 /*
11112 * Get the offset and fend of lock prefixes.
11113 */
11114 RTGCPTR GCPtrMemOff;
11115 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
11116
11117 /*
11118 * Store AL.
11119 */
11120 IEM_MC_BEGIN(0,1);
11121 IEM_MC_LOCAL(uint8_t, u8Tmp);
11122 IEM_MC_FETCH_GREG_U8(u8Tmp, X86_GREG_xAX);
11123 IEM_MC_STORE_MEM_U8(pIemCpu->iEffSeg, GCPtrMemOff, u8Tmp);
11124 IEM_MC_ADVANCE_RIP();
11125 IEM_MC_END();
11126 return VINF_SUCCESS;
11127}
11128
11129
11130/** Opcode 0xa3. */
11131FNIEMOP_DEF(iemOp_mov_Ov_rAX)
11132{
11133 /*
11134 * Get the offset and fend of lock prefixes.
11135 */
11136 RTGCPTR GCPtrMemOff;
11137 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
11138
11139 /*
11140 * Store rAX.
11141 */
11142 switch (pIemCpu->enmEffOpSize)
11143 {
11144 case IEMMODE_16BIT:
11145 IEM_MC_BEGIN(0,1);
11146 IEM_MC_LOCAL(uint16_t, u16Tmp);
11147 IEM_MC_FETCH_GREG_U16(u16Tmp, X86_GREG_xAX);
11148 IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrMemOff, u16Tmp);
11149 IEM_MC_ADVANCE_RIP();
11150 IEM_MC_END();
11151 return VINF_SUCCESS;
11152
11153 case IEMMODE_32BIT:
11154 IEM_MC_BEGIN(0,1);
11155 IEM_MC_LOCAL(uint32_t, u32Tmp);
11156 IEM_MC_FETCH_GREG_U32(u32Tmp, X86_GREG_xAX);
11157 IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrMemOff, u32Tmp);
11158 IEM_MC_ADVANCE_RIP();
11159 IEM_MC_END();
11160 return VINF_SUCCESS;
11161
11162 case IEMMODE_64BIT:
11163 IEM_MC_BEGIN(0,1);
11164 IEM_MC_LOCAL(uint64_t, u64Tmp);
11165 IEM_MC_FETCH_GREG_U64(u64Tmp, X86_GREG_xAX);
11166 IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrMemOff, u64Tmp);
11167 IEM_MC_ADVANCE_RIP();
11168 IEM_MC_END();
11169 return VINF_SUCCESS;
11170
11171 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11172 }
11173}
11174
/**
 * Macro used by iemOp_movsb_Xb_Yb and iemOp_movswd_Xv_Yv.
 *
 * Emits one non-repeated MOVS step: load ValBits from effSeg:[xSI], store to
 * ES:[xDI], then advance (or retreat, if EFLAGS.DF is set) both xSI and xDI
 * by the operand size in bytes.  Index registers are fetched zero-extended to
 * 64 bits per the AddrBits address mode.
 */
#define IEM_MOVS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR,           uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, pIemCpu->iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END();
11193
/**
 * Opcode 0xa4 - movsb Xb, Yb.
 *
 * Byte string move, effSeg:[xSI] -> ES:[xDI].  REP/REPNE prefixed forms are
 * deferred to the C implementation; the plain form executes one step inline.
 */
FNIEMOP_DEF(iemOp_movsb_Xb_Yb)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep movsb Xb,Yb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr16, pIemCpu->iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr32, pIemCpu->iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr64, pIemCpu->iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("movsb Xb,Yb");

    /*
     * Sharing case implementation with movs[wdq] below.
     */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_MOVS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_MOVS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_MOVS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
11227
11228
/**
 * Opcode 0xa5 - movsw/movsd/movsq Xv, Yv.
 *
 * Word/dword/qword string move, effSeg:[xSI] -> ES:[xDI], selected by the
 * effective operand size.  REP/REPNE prefixed forms are deferred to the C
 * implementation; the plain form executes one step inline via IEM_MOVS_CASE.
 */
FNIEMOP_DEF(iemOp_movswd_Xv_Yv)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep movs Xv,Yv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            /* no break above: every branch of the inner switch returns, so this
               fall-through label is unreachable. */
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("movs Xv,Yv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with movsb.
     */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_MOVS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_MOVS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
11311
11312#undef IEM_MOVS_CASE
11313
/**
 * Macro used by iemOp_cmpsb_Xb_Yb and iemOp_cmpswd_Xv_Yv.
 *
 * Emits one non-repeated CMPS step: load ValBits from effSeg:[xSI] and from
 * ES:[xDI], run the cmp assembly worker to update EFLAGS, then advance (or
 * retreat, if EFLAGS.DF is set) both xSI and xDI by the operand size.
 */
#define IEM_CMPS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(3, 3); \
        IEM_MC_ARG(uint##ValBits##_t *, puValue1, 0); \
        IEM_MC_ARG(uint##ValBits##_t,   uValue2,  1); \
        IEM_MC_ARG(uint32_t *,          pEFlags,  2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue1); \
        IEM_MC_LOCAL(RTGCPTR,           uAddr); \
        \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue1, pIemCpu->iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue2, X86_SREG_ES, uAddr); \
        IEM_MC_REF_LOCAL(puValue1, uValue1); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puValue1, uValue2, pEFlags); \
        \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END(); \
11340
11341/** Opcode 0xa6. */
11342FNIEMOP_DEF(iemOp_cmpsb_Xb_Yb)
11343{
11344 IEMOP_HLP_NO_LOCK_PREFIX();
11345
11346 /*
11347 * Use the C implementation if a repeat prefix is encountered.
11348 */
11349 if (pIemCpu->fPrefixes & IEM_OP_PRF_REPZ)
11350 {
11351 IEMOP_MNEMONIC("repe cmps Xb,Yb");
11352 switch (pIemCpu->enmEffAddrMode)
11353 {
11354 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr16, pIemCpu->iEffSeg);
11355 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr32, pIemCpu->iEffSeg);
11356 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr64, pIemCpu->iEffSeg);
11357 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11358 }
11359 }
11360 if (pIemCpu->fPrefixes & IEM_OP_PRF_REPNZ)
11361 {
11362 IEMOP_MNEMONIC("repe cmps Xb,Yb");
11363 switch (pIemCpu->enmEffAddrMode)
11364 {
11365 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr16, pIemCpu->iEffSeg);
11366 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr32, pIemCpu->iEffSeg);
11367 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr64, pIemCpu->iEffSeg);
11368 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11369 }
11370 }
11371 IEMOP_MNEMONIC("cmps Xb,Yb");
11372
11373 /*
11374 * Sharing case implementation with cmps[wdq] below.
11375 */
11376 switch (pIemCpu->enmEffAddrMode)
11377 {
11378 case IEMMODE_16BIT: IEM_CMPS_CASE(8, 16); break;
11379 case IEMMODE_32BIT: IEM_CMPS_CASE(8, 32); break;
11380 case IEMMODE_64BIT: IEM_CMPS_CASE(8, 64); break;
11381 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11382 }
11383 return VINF_SUCCESS;
11384
11385}
11386
11387
/**
 * Opcode 0xa7 - cmpsw/cmpsd/cmpsq Xv, Yv.
 *
 * Word/dword/qword string compare, effSeg:[xSI] vs ES:[xDI], selected by the
 * effective operand size.  REPE/REPNE prefixed forms are deferred to the C
 * implementation; the plain form executes one step inline via IEM_CMPS_CASE.
 */
FNIEMOP_DEF(iemOp_cmpswd_Xv_Yv)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC("repe cmps Xv,Yv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            /* no break above: every branch of the inner switch returns. */
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_4);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    if (pIemCpu->fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC("repne cmps Xv,Yv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            /* no break above: every branch of the inner switch returns. */
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_2);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    IEMOP_MNEMONIC("cmps Xv,Yv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with cmpsb.
     */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_CMPS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_CMPS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;

}
11506
11507#undef IEM_CMPS_CASE
11508
11509/** Opcode 0xa8. */
11510FNIEMOP_DEF(iemOp_test_AL_Ib)
11511{
11512 IEMOP_MNEMONIC("test al,Ib");
11513 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
11514 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_test);
11515}
11516
11517
11518/** Opcode 0xa9. */
11519FNIEMOP_DEF(iemOp_test_eAX_Iz)
11520{
11521 IEMOP_MNEMONIC("test rAX,Iz");
11522 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
11523 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_test);
11524}
11525
11526
/**
 * Macro used by iemOp_stosb_Yb_AL and iemOp_stoswd_Yv_eAX.
 *
 * Emits one non-repeated STOS step: store the low ValBits of rAX to
 * ES:[xDI], then advance (or retreat, if EFLAGS.DF is set) xDI by the
 * operand size in bytes.
 */
#define IEM_STOS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR,           uAddr); \
        IEM_MC_FETCH_GREG_U##ValBits(uValue, X86_GREG_xAX); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr,  X86_GREG_xDI); \
        IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END(); \
11542
/**
 * Opcode 0xaa - stosb Yb, AL.
 *
 * Stores AL to ES:[xDI].  REP prefixed forms are deferred to the C
 * implementation; the plain form executes one step inline.
 */
FNIEMOP_DEF(iemOp_stosb_Yb_AL)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep stos Yb,al");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m16);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m32);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("stos Yb,al");

    /*
     * Sharing case implementation with stos[wdq] below.
     */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_STOS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_STOS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_STOS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
11576
11577
/**
 * Opcode 0xab - stosw/stosd/stosq Yv, eAX.
 *
 * Stores AX/EAX/RAX (per effective operand size) to ES:[xDI].  REP prefixed
 * forms are deferred to the C implementation; the plain form executes one
 * step inline via IEM_STOS_CASE.
 */
FNIEMOP_DEF(iemOp_stoswd_Yv_eAX)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep stos Yv,rAX");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            /* no break above: every branch of the inner switch returns. */
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_9);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("stos Yv,rAX");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with stosb.
     */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_STOS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_STOS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_STOS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_STOS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_STOS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_STOS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_STOS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_STOS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
11660
11661#undef IEM_STOS_CASE
11662
/**
 * Macro used by iemOp_lodsb_AL_Xb and iemOp_lodswd_eAX_Xv.
 *
 * Emits one non-repeated LODS step: load ValBits from effSeg:[xSI] into the
 * low ValBits of rAX, then advance (or retreat, if EFLAGS.DF is set) xSI by
 * the operand size in bytes.
 */
#define IEM_LODS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR,           uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, pIemCpu->iEffSeg, uAddr); \
        IEM_MC_STORE_GREG_U##ValBits(X86_GREG_xAX, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END();
11678
/**
 * Opcode 0xac - lodsb AL, Xb.
 *
 * Loads AL from effSeg:[xSI].  REP prefixed forms are deferred to the C
 * implementation; the plain form executes one step inline.
 */
FNIEMOP_DEF(iemOp_lodsb_AL_Xb)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep lodsb al,Xb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m16, pIemCpu->iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m32, pIemCpu->iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m64, pIemCpu->iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("lodsb al,Xb");

    /*
     * Sharing case implementation with lods[wdq] below.
     */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_LODS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_LODS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_LODS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
11712
11713
/**
 * Opcode 0xad - lodsw/lodsd/lodsq eAX, Xv.
 *
 * Loads AX/EAX/RAX (per effective operand size) from effSeg:[xSI].  REP
 * prefixed forms are deferred to the C implementation; the plain form
 * executes one step inline via IEM_LODS_CASE.
 */
FNIEMOP_DEF(iemOp_lodswd_eAX_Xv)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep lods rAX,Xv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            /* no break above: every branch of the inner switch returns. */
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_7);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("lods rAX,Xv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with lodsb.
     */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_LODS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_LODS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_LODS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_LODS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_LODS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
11796
11797#undef IEM_LODS_CASE
11798
/**
 * Macro used by iemOp_scasb_AL_Xb and iemOp_scaswd_eAX_Xv.
 *
 * Emits one non-repeated SCAS step: compare the low ValBits of rAX against
 * ES:[xDI] via the cmp assembly worker (updates EFLAGS only; rAX is passed
 * by reference but cmp does not write it), then advance (or retreat, if
 * EFLAGS.DF is set) xDI by the operand size in bytes.
 */
#define IEM_SCAS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(3, 2); \
        IEM_MC_ARG(uint##ValBits##_t *, puRax,   0); \
        IEM_MC_ARG(uint##ValBits##_t,   uValue,  1); \
        IEM_MC_ARG(uint32_t *,          pEFlags, 2); \
        IEM_MC_LOCAL(RTGCPTR,           uAddr); \
        \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, X86_SREG_ES, uAddr); \
        IEM_MC_REF_GREG_U##ValBits(puRax, X86_GREG_xAX); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puRax, uValue, pEFlags); \
        \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END();
11820
/**
 * Opcode 0xae - scasb AL, Xb.
 *
 * Compares AL against ES:[xDI].  REPE/REPNE prefixed forms are deferred to
 * the C implementation; the plain form executes one step inline.
 */
FNIEMOP_DEF(iemOp_scasb_AL_Xb)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC("repe scasb al,Xb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m16);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m32);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC("repne scasb al,Xb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m16);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m32);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("scasb al,Xb");

    /*
     * Sharing case implementation with scas[wdq] below.
     */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_SCAS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_SCAS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_SCAS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
11865
11866
/**
 * Opcode 0xaf - scasw/scasd/scasq eAX, Xv.
 *
 * Compares AX/EAX/RAX (per effective operand size) against ES:[xDI].
 * REPE/REPNE prefixed forms are deferred to the C implementation; the plain
 * form executes one step inline via IEM_SCAS_CASE.
 */
FNIEMOP_DEF(iemOp_scaswd_eAX_Xv)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC("repe scas rAX,Xv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            /* no break above: every branch of the inner switch returns. */
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /** @todo It's this wrong, we can do 16-bit addressing in 64-bit mode, but not 32-bit. right? */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC("repne scas rAX,Xv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            /* no break above: every branch of the inner switch returns. */
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_5);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("scas rAX,Xv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with scasb.
     */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_SCAS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_SCAS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_SCAS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_SCAS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_SCAS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_SCAS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_SCAS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_SCAS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
11982
11983#undef IEM_SCAS_CASE
11984
/**
 * Common 'mov r8, imm8' helper.
 *
 * Fetches the 8-bit immediate operand and stores it into the given 8-bit
 * general purpose register.
 *
 * @param   iReg    The destination register index (any REX.B extension has
 *                  already been applied by the caller).
 */
FNIEMOP_DEF_1(iemOpCommonMov_r8_Ib, uint8_t, iReg)
{
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /* MOV does not allow the LOCK prefix. */

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL_CONST(uint8_t, u8Value,/*=*/ u8Imm);
    IEM_MC_STORE_GREG_U8(iReg, u8Value);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
12001
12002
/** Opcode 0xb0. mov AL,Ib (with REX.B: R8L). */
FNIEMOP_DEF(iemOp_mov_AL_Ib)
{
    IEMOP_MNEMONIC("mov AL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xAX | pIemCpu->uRexB);
}
12009
12010
/** Opcode 0xb1. mov CL,Ib (with REX.B: R9L). */
FNIEMOP_DEF(iemOp_CL_Ib)
{
    IEMOP_MNEMONIC("mov CL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xCX | pIemCpu->uRexB);
}
12017
12018
/** Opcode 0xb2. mov DL,Ib (with REX.B: R10L). */
FNIEMOP_DEF(iemOp_DL_Ib)
{
    IEMOP_MNEMONIC("mov DL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDX | pIemCpu->uRexB);
}
12025
12026
/** Opcode 0xb3. mov BL,Ib (with REX.B: R11L). */
FNIEMOP_DEF(iemOp_BL_Ib)
{
    IEMOP_MNEMONIC("mov BL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBX | pIemCpu->uRexB);
}
12033
12034
/** Opcode 0xb4. mov AH,Ib.
 * AH is encoded as register index 4 (X86_GREG_xSP); this is intentional, not
 * a typo.  NOTE(review): presumably the 8-bit GREG store maps indices 4-7 to
 * AH/CH/DH/BH when no REX prefix is present, and to SPL..DIL otherwise —
 * confirm against IEM_MC_STORE_GREG_U8. */
FNIEMOP_DEF(iemOp_mov_AH_Ib)
{
    IEMOP_MNEMONIC("mov AH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP | pIemCpu->uRexB);
}
12041
12042
/** Opcode 0xb5. mov CH,Ib.
 * CH is encoded as register index 5 (X86_GREG_xBP) — intentional; see the
 * note on opcode 0xb4 regarding the high-byte register encoding. */
FNIEMOP_DEF(iemOp_CH_Ib)
{
    IEMOP_MNEMONIC("mov CH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBP | pIemCpu->uRexB);
}
12049
12050
/** Opcode 0xb6. mov DH,Ib.
 * DH is encoded as register index 6 (X86_GREG_xSI) — intentional; see the
 * note on opcode 0xb4 regarding the high-byte register encoding. */
FNIEMOP_DEF(iemOp_DH_Ib)
{
    IEMOP_MNEMONIC("mov DH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSI | pIemCpu->uRexB);
}
12057
12058
/** Opcode 0xb7. mov BH,Ib.
 * BH is encoded as register index 7 (X86_GREG_xDI) — intentional; see the
 * note on opcode 0xb4 regarding the high-byte register encoding. */
FNIEMOP_DEF(iemOp_BH_Ib)
{
    IEMOP_MNEMONIC("mov BH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDI | pIemCpu->uRexB);
}
12065
12066
/**
 * Common 'mov regX,immX' helper.
 *
 * Fetches an immediate of the current effective operand size and stores it
 * into the given general purpose register.  Note that in 64-bit operand size
 * this is the one instruction family taking a full 64-bit immediate.
 *
 * @param   iReg    The destination register index (any REX.B extension has
 *                  already been applied by the caller).
 */
FNIEMOP_DEF_1(iemOpCommonMov_Rv_Iv, uint8_t, iReg)
{
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_NO_LOCK_PREFIX(); /* MOV does not allow the LOCK prefix. */

            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL_CONST(uint16_t, u16Value,/*=*/ u16Imm);
            IEM_MC_STORE_GREG_U16(iReg, u16Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL_CONST(uint32_t, u32Value,/*=*/ u32Imm);
            IEM_MC_STORE_GREG_U32(iReg, u32Value); /* NOTE(review): presumably zero-extends into the upper half — confirm. */
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
        }
        case IEMMODE_64BIT:
        {
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm); /* 64-bit immediate! */
            IEMOP_HLP_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL_CONST(uint64_t, u64Value,/*=*/ u64Imm);
            IEM_MC_STORE_GREG_U64(iReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
        }
        /* No default case here, unlike sibling switches — falls through to
           the common return on an (impossible) unknown operand size. */
    }

    return VINF_SUCCESS;
}
12115
12116
/** Opcode 0xb8. mov rAX,Iv (with REX.B: R8). */
FNIEMOP_DEF(iemOp_eAX_Iv)
{
    IEMOP_MNEMONIC("mov rAX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xAX | pIemCpu->uRexB);
}
12123
12124
/** Opcode 0xb9. mov rCX,Iv (with REX.B: R9). */
FNIEMOP_DEF(iemOp_eCX_Iv)
{
    IEMOP_MNEMONIC("mov rCX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xCX | pIemCpu->uRexB);
}
12131
12132
/** Opcode 0xba. mov rDX,Iv (with REX.B: R10). */
FNIEMOP_DEF(iemOp_eDX_Iv)
{
    IEMOP_MNEMONIC("mov rDX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDX | pIemCpu->uRexB);
}
12139
12140
/** Opcode 0xbb. mov rBX,Iv (with REX.B: R11). */
FNIEMOP_DEF(iemOp_eBX_Iv)
{
    IEMOP_MNEMONIC("mov rBX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBX | pIemCpu->uRexB);
}
12147
12148
/** Opcode 0xbc. mov rSP,Iv (with REX.B: R12). */
FNIEMOP_DEF(iemOp_eSP_Iv)
{
    IEMOP_MNEMONIC("mov rSP,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSP | pIemCpu->uRexB);
}
12155
12156
/** Opcode 0xbd. mov rBP,Iv (with REX.B: R13). */
FNIEMOP_DEF(iemOp_eBP_Iv)
{
    IEMOP_MNEMONIC("mov rBP,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBP | pIemCpu->uRexB);
}
12163
12164
/** Opcode 0xbe. mov rSI,Iv (with REX.B: R14). */
FNIEMOP_DEF(iemOp_eSI_Iv)
{
    IEMOP_MNEMONIC("mov rSI,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSI | pIemCpu->uRexB);
}
12171
12172
/** Opcode 0xbf. mov rDI,Iv (with REX.B: R15). */
FNIEMOP_DEF(iemOp_eDI_Iv)
{
    IEMOP_MNEMONIC("mov rDI,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDI | pIemCpu->uRexB);
}
12179
12180
/** Opcode 0xc0. Group 2: shift/rotate byte r/m by an immediate count. */
FNIEMOP_DEF(iemOp_Grp2_Eb_Ib)
{
    IEMOP_HLP_MIN_186(); /* 0xc0/0xc1 first appeared on the 80186. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* The reg field of ModR/M selects the operation. */
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Eb,Ib"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Eb,Ib"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Eb,Ib"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Eb,Ib"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Eb,Ib"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Eb,Ib"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Eb,Ib"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE(); /* /6 is undefined for group 2. */
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    /* OF and AF are architecturally undefined after shifts/rotates. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEMOP_HLP_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
        IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift,  1);
        IEM_MC_ARG(uint32_t *,      pEFlags,            2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,   pu8Dst,          0);
        IEM_MC_ARG(uint8_t,     cShiftArg,       1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        /* The immediate byte follows the ModR/M bytes (1 = bytes after displacement). */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEM_MC_ASSIGN(cShiftArg, cShift);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
12240
12241
/** Opcode 0xc1. Group 2: shift/rotate word/dword/qword r/m by an immediate count. */
FNIEMOP_DEF(iemOp_Grp2_Ev_Ib)
{
    IEMOP_HLP_MIN_186(); /* 0xc0/0xc1 first appeared on the 80186. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* The reg field of ModR/M selects the operation. */
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Ev,Ib"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Ev,Ib"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Ev,Ib"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Ev,Ib"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Ev,Ib"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Ev,Ib"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Ev,Ib"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE(); /* /6 is undefined for group 2. */
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    /* OF and AF are architecturally undefined after shifts/rotates. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEMOP_HLP_NO_LOCK_PREFIX();
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift,  1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift,  1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit register writes clear the upper half in 64-bit mode. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift,  1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,  pu16Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* The immediate byte follows the ModR/M bytes (1 = bytes after displacement). */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,  pu32Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,  pu64Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12379
12380
/** Opcode 0xc2. Near return, popping Iw extra bytes off the stack. */
FNIEMOP_DEF(iemOp_retn_Iw)
{
    IEMOP_MNEMONIC("retn Iw");
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* near ret defaults to 64-bit operand size in long mode. */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pIemCpu->enmEffOpSize, u16Imm);
}
12390
12391
/** Opcode 0xc3. Near return (no immediate; shares iemCImpl_retn with 0xc2). */
FNIEMOP_DEF(iemOp_retn)
{
    IEMOP_MNEMONIC("retn");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* near ret defaults to 64-bit operand size in long mode. */
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pIemCpu->enmEffOpSize, 0);
}
12400
12401
/** Opcode 0xc4. LES Gv,Mp in legacy modes; 2-byte VEX prefix otherwise. */
FNIEMOP_DEF(iemOp_les_Gv_Mp_vex2)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (   pIemCpu->enmCpuMode == IEMMODE_64BIT
        || (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC("2-byte-vex");
        /* The LES instruction is invalid 64-bit mode. In legacy and
           compatability mode it is invalid with MOD=3.
           The use as a VEX prefix is made possible by assigning the inverted
           REX.R to the top MOD bit, and the top bit in the inverted register
           specifier to the bottom MOD bit, thereby effectively limiting 32-bit
           to accessing registers 0..7 in this VEX form. */
        /** @todo VEX: Just use new tables for it. */
        /* VEX decoding is not implemented yet; treat it as invalid for now. */
        return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_MNEMONIC("les Gv,Mp");
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_ES, bRm);
}
12422
12423
/** Opcode 0xc5. LDS Gv,Mp in legacy modes; 3-byte VEX prefix otherwise. */
FNIEMOP_DEF(iemOp_lds_Gv_Mp_vex3)
{
    /* The LDS instruction is invalid 64-bit mode. In legacy and
       compatability mode it is invalid with MOD=3.
       The use as a VEX prefix is made possible by assigning the inverted
       REX.R and REX.X to the two MOD bits, since the REX bits are ignored
       outside of 64-bit mode. VEX is not available in real or v86 mode. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pIemCpu->enmCpuMode != IEMMODE_64BIT)
    {
        if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        {
            IEMOP_MNEMONIC("lds Gv,Mp");
            return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_DS, bRm);
        }
        IEMOP_HLP_NO_REAL_OR_V86_MODE(); /* VEX form requires protected mode. */
    }

    IEMOP_MNEMONIC("3-byte-vex");
    /** @todo Test when exctly the VEX conformance checks kick in during
     *        instruction decoding and fetching (using \#PF). */
    /* Consume the two VEX payload bytes and the opcode byte so the
       instruction length is accounted for, even though we reject it below. */
    uint8_t bVex1;   IEM_OPCODE_GET_NEXT_U8(&bVex1);
    uint8_t bVex2;   IEM_OPCODE_GET_NEXT_U8(&bVex2);
    uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
#if 0 /* will make sense of this next week... */
    if (   !(pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REX))
        &&
        )
    {

    }
#endif

    /** @todo VEX: Just use new tables for it. */
    return IEMOP_RAISE_INVALID_OPCODE();
}
12461
12462
/** Opcode 0xc6. Group 11: mov Eb,Ib (the /0 encoding is the only valid one). */
FNIEMOP_DEF(iemOp_Grp11_Eb_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
        return IEMOP_RAISE_INVALID_OPCODE();
    IEMOP_MNEMONIC("mov Eb,Ib");

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_BEGIN(0, 0);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u8Imm);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        /* The immediate byte follows the ModR/M bytes (1 = bytes after displacement). */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_STORE_MEM_U8(pIemCpu->iEffSeg, GCPtrEffDst, u8Imm);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
12494
12495
/** Opcode 0xc7. Group 11: mov Ev,Iz (the /0 encoding is the only valid one).
 * In 64-bit operand size the immediate is 32 bits, sign-extended to 64. */
FNIEMOP_DEF(iemOp_Grp11_Ev_Iz)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Ev,Iz in this group. */
        return IEMOP_RAISE_INVALID_OPCODE();
    IEMOP_MNEMONIC("mov Ev,Iz");

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 0);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 0);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 0);
                /* 32-bit immediate, sign-extended to 64 bits. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                /* 2/4 = immediate bytes still to come after the displacement. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrEffDst, u32Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* still only 4 immediate bytes in 64-bit mode. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffDst, u64Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12576
12577
12578
12579
/** Opcode 0xc8. ENTER Iw,Ib — create a stack frame for a procedure. */
FNIEMOP_DEF(iemOp_enter_Iw_Ib)
{
    IEMOP_MNEMONIC("enter Iw,Ib");
    IEMOP_HLP_MIN_186(); /* ENTER first appeared on the 80186. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEMOP_HLP_NO_LOCK_PREFIX();
    uint16_t cbFrame;        IEM_OPCODE_GET_NEXT_U16(&cbFrame);        /* bytes of locals to reserve */
    uint8_t  u8NestingLevel; IEM_OPCODE_GET_NEXT_U8(&u8NestingLevel);  /* lexical nesting level (0..31 architecturally) */
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_enter, pIemCpu->enmEffOpSize, cbFrame, u8NestingLevel);
}
12591
12592
12593/** Opcode 0xc9. */
12594FNIEMOP_DEF(iemOp_leave)
12595{
12596 IEMOP_MNEMONIC("retn");
12597 IEMOP_HLP_MIN_186();
12598 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
12599 IEMOP_HLP_NO_LOCK_PREFIX();
12600 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_leave, pIemCpu->enmEffOpSize);
12601}
12602
12603
/** Opcode 0xca. Far return, popping Iw extra bytes off the stack. */
FNIEMOP_DEF(iemOp_retf_Iw)
{
    IEMOP_MNEMONIC("retf Iw");
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pIemCpu->enmEffOpSize, u16Imm);
}
12613
12614
/** Opcode 0xcb. Far return (no immediate; shares iemCImpl_retf with 0xca). */
FNIEMOP_DEF(iemOp_retf)
{
    IEMOP_MNEMONIC("retf");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pIemCpu->enmEffOpSize, 0);
}
12623
12624
/** Opcode 0xcc. INT3 breakpoint — raises \#BP, flagged as the special
 *  one-byte encoding (fIsBpInstr=true). */
FNIEMOP_DEF(iemOp_int_3)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_BP, true /*fIsBpInstr*/);
}
12631
12632
/** Opcode 0xcd. INT Ib — software interrupt with vector from the immediate. */
FNIEMOP_DEF(iemOp_int_Ib)
{
    uint8_t u8Int; IEM_OPCODE_GET_NEXT_U8(&u8Int);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, u8Int, false /*fIsBpInstr*/);
}
12640
12641
/** Opcode 0xce. INTO — raise \#OF if the overflow flag is set.
 * Invalid in 64-bit mode.  NOTE(review): presumably iemCImpl_int checks
 * EFLAGS.OF and advances RIP itself, since neither is done here — confirm. */
FNIEMOP_DEF(iemOp_into)
{
    IEMOP_MNEMONIC("into");
    IEMOP_HLP_NO_64BIT();

    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG_CONST(uint8_t,   u8Int,      /*=*/ X86_XCPT_OF, 0);
    IEM_MC_ARG_CONST(bool,      fIsBpInstr, /*=*/ false,       1);
    IEM_MC_CALL_CIMPL_2(iemCImpl_int, u8Int, fIsBpInstr);
    IEM_MC_END();
    return VINF_SUCCESS;
}
12655
12656
/** Opcode 0xcf. IRET — interrupt return; all the heavy lifting is in the
 *  C implementation. */
FNIEMOP_DEF(iemOp_iret)
{
    IEMOP_MNEMONIC("iret");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_iret, pIemCpu->enmEffOpSize);
}
12664
12665
/** Opcode 0xd0. Group 2: shift/rotate byte r/m by a fixed count of 1. */
FNIEMOP_DEF(iemOp_Grp2_Eb_1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* The reg field of ModR/M selects the operation. */
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Eb,1"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Eb,1"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Eb,1"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Eb,1"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Eb,1"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Eb,1"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Eb,1"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE(); /* /6 is undefined for group 2. */
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
    /* OF and AF are architecturally undefined after shifts/rotates. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
        IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=*/1,   1); /* count is hard-wired to 1 for 0xd0/0xd1 */
        IEM_MC_ARG(uint32_t *,      pEFlags,            2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
        IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=*/1,   1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,        2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); /* no immediate bytes follow. */
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
12721
12722
12723
/** Opcode 0xd1. Group 2: shift/rotate word/dword/qword r/m by a fixed count of 1. */
FNIEMOP_DEF(iemOp_Grp2_Ev_1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* The reg field of ModR/M selects the operation. */
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Ev,1"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Ev,1"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Ev,1"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Ev,1"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Ev,1"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Ev,1"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Ev,1"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE(); /* /6 is undefined for group 2. */
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
    /* OF and AF are architecturally undefined after shifts/rotates. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_NO_LOCK_PREFIX();
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1); /* count hard-wired to 1 for 0xd0/0xd1 */
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit register writes clear the upper half in 64-bit mode. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,      pu16Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,        2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); /* no immediate bytes follow. */
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,      pu32Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,        2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,      pu64Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,        2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12853
12854
/** Opcode 0xd2. Group 2: shift/rotate byte r/m by the count in CL. */
FNIEMOP_DEF(iemOp_Grp2_Eb_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* The reg field of ModR/M selects the operation. */
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Eb,CL"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Eb,CL"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Eb,CL"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Eb,CL"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Eb,CL"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Eb,CL"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Eb,CL"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE(); /* /6 is undefined for group 2. */
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc, grr. */
    }
    /* OF and AF are architecturally undefined after shifts/rotates. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,   pu8Dst,     0);
        IEM_MC_ARG(uint8_t,     cShiftArg,  1);
        IEM_MC_ARG(uint32_t *,  pEFlags,    2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* the count comes from CL */
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,   pu8Dst,          0);
        IEM_MC_ARG(uint8_t,     cShiftArg,       1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); /* no immediate bytes follow. */
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* the count comes from CL */
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
12912
12913
/**
 * Opcode 0xd3 - Group 2: rotate/shift Ev by CL.
 *
 * The ModRM reg field selects the operation (rol/ror/rcl/rcr/shl/shr/sar);
 * /6 is an invalid encoding and raises \#UD.  The shift count is always
 * taken from CL.  OF and AF are declared undefined for verification since
 * real CPUs differ here.
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Ev,CL"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Ev,CL"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Ev,CL"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Ev,CL"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Ev,CL"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Ev,CL"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Ev,CL"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE(); /* /6 is undefined for group 2 */
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc needs a default case to see all paths return */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register destination: operate directly on a reference to the GPR. */
        IEMOP_HLP_NO_LOCK_PREFIX();
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,    0);
                IEM_MC_ARG(uint8_t,         cShiftArg,  1);
                IEM_MC_ARG(uint32_t *,      pEFlags,    2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* count comes from CL */
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,    0);
                IEM_MC_ARG(uint8_t,         cShiftArg,  1);
                IEM_MC_ARG(uint32_t *,      pEFlags,    2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit GPR writes zero the upper half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,    0);
                IEM_MC_ARG(uint8_t,         cShiftArg,  1);
                IEM_MC_ARG(uint32_t *,      pEFlags,    2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory destination: map it read-write and commit after the op. */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,          pu16Dst,    0);
                IEM_MC_ARG(uint8_t,             cShiftArg,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(        pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,           GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,          pu32Dst,    0);
                IEM_MC_ARG(uint8_t,             cShiftArg,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(        pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,           GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,          pu64Dst,    0);
                IEM_MC_ARG(uint8_t,             cShiftArg,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(        pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,           GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
13049
/** Opcode 0xd4 - AAM Ib (ASCII adjust AX after multiply).
 * Invalid in 64-bit mode.  An immediate of zero raises \#DE before the
 * C implementation is invoked. */
FNIEMOP_DEF(iemOp_aam_Ib)
{
    IEMOP_MNEMONIC("aam Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    if (!bImm)
        return IEMOP_RAISE_DIVIDE_ERROR(); /* AAM 0 divides by the immediate */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aam, bImm);
}
13061
13062
/** Opcode 0xd5 - AAD Ib (ASCII adjust AX before division).
 * Invalid in 64-bit mode; the whole operation is deferred to the C
 * implementation. */
FNIEMOP_DEF(iemOp_aad_Ib)
{
    IEMOP_MNEMONIC("aad Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aad, bImm);
}
13072
13073
/** Opcode 0xd6 - SALC: set AL from carry (AL = CF ? 0xff : 0x00).
 * Undocumented instruction; invalid in 64-bit mode. */
FNIEMOP_DEF(iemOp_salc)
{
    IEMOP_MNEMONIC("salc");
    IEMOP_HLP_MIN_286(); /* (undocumented at the time) */
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0xff);
    } IEM_MC_ELSE() {
        IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0x00);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
13093
13094
/** Opcode 0xd7 - XLAT: AL = [xBX + zero-extended AL], one case per effective
 * address size.  The MEM16/MEM32 fetch variants handle the narrower address
 * widths (presumably for address wrap semantics — confirm against the MC
 * implementations). */
FNIEMOP_DEF(iemOp_xlat)
{
    IEMOP_MNEMONIC("xlat");
    IEMOP_HLP_NO_LOCK_PREFIX();
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint16_t, u16Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U16(u16Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U16_TO_LOCAL(u16Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM16_U8(u8Tmp, pIemCpu->iEffSeg, u16Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint32_t, u32Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U32(u32Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U32_TO_LOCAL(u32Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM32_U8(u8Tmp, pIemCpu->iEffSeg, u32Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint64_t, u64Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U64(u64Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U64_TO_LOCAL(u64Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM_U8(u8Tmp, pIemCpu->iEffSeg, u64Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
13141
13142
13143/**
13144 * Common worker for FPU instructions working on ST0 and STn, and storing the
13145 * result in ST0.
13146 *
13147 * @param pfnAImpl Pointer to the instruction implementation (assembly).
13148 */
13149FNIEMOP_DEF_2(iemOpHlpFpu_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
13150{
13151 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13152
13153 IEM_MC_BEGIN(3, 1);
13154 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
13155 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
13156 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
13157 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
13158
13159 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13160 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13161 IEM_MC_PREPARE_FPU_USAGE();
13162 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
13163 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
13164 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
13165 IEM_MC_ELSE()
13166 IEM_MC_FPU_STACK_UNDERFLOW(0);
13167 IEM_MC_ENDIF();
13168 IEM_MC_ADVANCE_RIP();
13169
13170 IEM_MC_END();
13171 return VINF_SUCCESS;
13172}
13173
13174
13175/**
13176 * Common worker for FPU instructions working on ST0 and STn, and only affecting
13177 * flags.
13178 *
13179 * @param pfnAImpl Pointer to the instruction implementation (assembly).
13180 */
13181FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
13182{
13183 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13184
13185 IEM_MC_BEGIN(3, 1);
13186 IEM_MC_LOCAL(uint16_t, u16Fsw);
13187 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
13188 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
13189 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
13190
13191 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13192 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13193 IEM_MC_PREPARE_FPU_USAGE();
13194 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
13195 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
13196 IEM_MC_UPDATE_FSW(u16Fsw);
13197 IEM_MC_ELSE()
13198 IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
13199 IEM_MC_ENDIF();
13200 IEM_MC_ADVANCE_RIP();
13201
13202 IEM_MC_END();
13203 return VINF_SUCCESS;
13204}
13205
13206
13207/**
13208 * Common worker for FPU instructions working on ST0 and STn, only affecting
13209 * flags, and popping when done.
13210 *
13211 * @param pfnAImpl Pointer to the instruction implementation (assembly).
13212 */
13213FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
13214{
13215 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13216
13217 IEM_MC_BEGIN(3, 1);
13218 IEM_MC_LOCAL(uint16_t, u16Fsw);
13219 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
13220 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
13221 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
13222
13223 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13224 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13225 IEM_MC_PREPARE_FPU_USAGE();
13226 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
13227 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
13228 IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
13229 IEM_MC_ELSE()
13230 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(UINT8_MAX);
13231 IEM_MC_ENDIF();
13232 IEM_MC_ADVANCE_RIP();
13233
13234 IEM_MC_END();
13235 return VINF_SUCCESS;
13236}
13237
13238
/** Opcode 0xd8 11/0 - FADD ST(0),ST(i): ST(0) += ST(i). */
FNIEMOP_DEF_1(iemOp_fadd_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fadd st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fadd_r80_by_r80);
}
13245
13246
/** Opcode 0xd8 11/1 - FMUL ST(0),ST(i): ST(0) *= ST(i). */
FNIEMOP_DEF_1(iemOp_fmul_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fmul st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fmul_r80_by_r80);
}
13253
13254
/** Opcode 0xd8 11/2 - FCOM ST(0),ST(i): compare, setting FSW condition codes only. */
FNIEMOP_DEF_1(iemOp_fcom_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fcom_r80_by_r80);
}
13261
13262
/** Opcode 0xd8 11/3 - FCOMP ST(0),ST(i): compare like FCOM, then pop the stack.
 * Reuses the FCOM assembly worker via the popping helper. */
FNIEMOP_DEF_1(iemOp_fcomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fcom_r80_by_r80);
}
13269
13270
/** Opcode 0xd8 11/4 - FSUB ST(0),ST(i): ST(0) -= ST(i). */
FNIEMOP_DEF_1(iemOp_fsub_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsub st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsub_r80_by_r80);
}
13277
13278
/** Opcode 0xd8 11/5 - FSUBR ST(0),ST(i): reversed subtract, ST(0) = ST(i) - ST(0). */
FNIEMOP_DEF_1(iemOp_fsubr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsubr_r80_by_r80);
}
13285
13286
/** Opcode 0xd8 11/6 - FDIV ST(0),ST(i): ST(0) /= ST(i). */
FNIEMOP_DEF_1(iemOp_fdiv_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdiv st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdiv_r80_by_r80);
}
13293
13294
/** Opcode 0xd8 11/7 - FDIVR ST(0),ST(i): reversed divide, ST(0) = ST(i) / ST(0). */
FNIEMOP_DEF_1(iemOp_fdivr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdivr_r80_by_r80);
}
13301
13302
13303/**
13304 * Common worker for FPU instructions working on ST0 and an m32r, and storing
13305 * the result in ST0.
13306 *
13307 * @param pfnAImpl Pointer to the instruction implementation (assembly).
13308 */
13309FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32r, uint8_t, bRm, PFNIEMAIMPLFPUR32, pfnAImpl)
13310{
13311 IEM_MC_BEGIN(3, 3);
13312 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13313 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
13314 IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
13315 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
13316 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
13317 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);
13318
13319 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13320 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13321
13322 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13323 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13324 IEM_MC_FETCH_MEM_R32(r32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);
13325
13326 IEM_MC_PREPARE_FPU_USAGE();
13327 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
13328 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr32Val2);
13329 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
13330 IEM_MC_ELSE()
13331 IEM_MC_FPU_STACK_UNDERFLOW(0);
13332 IEM_MC_ENDIF();
13333 IEM_MC_ADVANCE_RIP();
13334
13335 IEM_MC_END();
13336 return VINF_SUCCESS;
13337}
13338
13339
/** Opcode 0xd8 !11/0 - FADD ST(0),m32r: ST(0) += [mem32 real]. */
FNIEMOP_DEF_1(iemOp_fadd_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fadd st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fadd_r80_by_r32);
}
13346
13347
/** Opcode 0xd8 !11/1 - FMUL ST(0),m32r: ST(0) *= [mem32 real]. */
FNIEMOP_DEF_1(iemOp_fmul_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fmul st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fmul_r80_by_r32);
}
13354
13355
/** Opcode 0xd8 !11/2 - FCOM ST(0),m32r: compare ST(0) with a 32-bit real
 * memory operand, updating only FSW (with the memory operand recorded as
 * FPU data pointer). */
FNIEMOP_DEF_1(iemOp_fcom_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcom st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U,            r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U,  pr32Val2,   r32Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13388
13389
/** Opcode 0xd8 !11/3 - FCOMP ST(0),m32r: like FCOM m32r but pops the stack
 * afterwards (both on success and on underflow). */
FNIEMOP_DEF_1(iemOp_fcomp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcomp st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U,            r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U,  pr32Val2,   r32Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13422
13423
/** Opcode 0xd8 !11/4 - FSUB ST(0),m32r: ST(0) -= [mem32 real]. */
FNIEMOP_DEF_1(iemOp_fsub_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsub st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsub_r80_by_r32);
}
13430
13431
/** Opcode 0xd8 !11/5 - FSUBR ST(0),m32r: ST(0) = [mem32 real] - ST(0). */
FNIEMOP_DEF_1(iemOp_fsubr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsubr_r80_by_r32);
}
13438
13439
/** Opcode 0xd8 !11/6 - FDIV ST(0),m32r: ST(0) /= [mem32 real]. */
FNIEMOP_DEF_1(iemOp_fdiv_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdiv st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdiv_r80_by_r32);
}
13446
13447
/** Opcode 0xd8 !11/7 - FDIVR ST(0),m32r: ST(0) = [mem32 real] / ST(0). */
FNIEMOP_DEF_1(iemOp_fdivr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdivr_r80_by_r32);
}
13454
13455
/** Opcode 0xd8 - x87 escape group F0.
 * Records the FPU opcode offset, then dispatches on the ModRM reg field;
 * mod=3 selects the register (ST0,STn) forms, otherwise the m32r forms. */
FNIEMOP_DEF(iemOp_EscF0)
{
    /* Remember where the escape opcode starts so FOP can be saved later. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_stN,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_stN,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_stN, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m32r,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m32r,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m32r,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m32r,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m32r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m32r,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m32r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
13493
13494
/** Opcode 0xd9 /0 mem32real - FLD m32r: convert a 32-bit real memory operand
 * to 80-bit and push it onto the FPU stack.  Pushing requires ST7 (the
 * register that becomes the new top) to be empty, otherwise it is a stack
 * push overflow.
 * @sa iemOp_fld_m64r */
FNIEMOP_DEF_1(iemOp_fld_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fld m32r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U,            r32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U,  pr32Val,    r32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r32_to_r80, pFpuRes, pr32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13527
13528
/** Opcode 0xd9 !11/2 mem32real - FST m32r: store ST(0) to memory as 32-bit
 * real.  On stack underflow, a negative QNaN is written instead if the
 * invalid-operation exception is masked (FCW.IM set); otherwise the store is
 * skipped and only the underflow is recorded. */
FNIEMOP_DEF_1(iemOp_fst_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fst m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U,             pr32Dst,    1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,  2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        /* Commit is conditional on the FSW the worker produced. */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13563
13564
/** Opcode 0xd9 !11/3 - FSTP m32r: store ST(0) to memory as 32-bit real, then
 * pop the stack.  Same masked-underflow QNaN behavior as FST m32r. */
FNIEMOP_DEF_1(iemOp_fstp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fstp m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U,             pr32Dst,    1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,  2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13599
13600
/** Opcode 0xd9 !11/4 - FLDENV m14/28byte: load the FPU environment from
 * memory.  The image size depends on the effective operand size, so it is
 * passed to the C implementation together with segment and address. */
FNIEMOP_DEF_1(iemOp_fldenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fldenv m14/28byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE,           enmEffOpSize, /*=*/ pIemCpu->enmEffOpSize,  0);
    IEM_MC_ARG(uint8_t,                 iEffSeg,                                    1);
    IEM_MC_ARG(RTGCPTR,                 GCPtrEffSrc,                                2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fldenv, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
13617
13618
13619/** Opcode 0xd9 !11/5 */
13620FNIEMOP_DEF_1(iemOp_fldcw, uint8_t, bRm)
13621{
13622 IEMOP_MNEMONIC("fldcw m2byte");
13623 IEM_MC_BEGIN(1, 1);
13624 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13625 IEM_MC_ARG(uint16_t, u16Fsw, 0);
13626 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13627 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13628 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13629 IEM_MC_FETCH_MEM_U16(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
13630 IEM_MC_CALL_CIMPL_1(iemCImpl_fldcw, u16Fsw);
13631 IEM_MC_END();
13632 return VINF_SUCCESS;
13633}
13634
13635
13636/** Opcode 0xd9 !11/6 */
13637FNIEMOP_DEF_1(iemOp_fnstenv, uint8_t, bRm)
13638{
13639 IEMOP_MNEMONIC("fstenv m14/m28byte");
13640 IEM_MC_BEGIN(3, 0);
13641 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pIemCpu->enmEffOpSize, 0);
13642 IEM_MC_ARG(uint8_t, iEffSeg, 1);
13643 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
13644 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
13645 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13646 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13647 IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
13648 IEM_MC_CALL_CIMPL_3(iemCImpl_fnstenv, enmEffOpSize, iEffSeg, GCPtrEffDst);
13649 IEM_MC_END();
13650 return VINF_SUCCESS;
13651}
13652
13653
/** Opcode 0xd9 !11/7 - FNSTCW m2byte: store the FPU control word to memory.
 * Read-only access to the FPU state, so only actualization for read is
 * needed. */
FNIEMOP_DEF_1(iemOp_fnstcw, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fnstcw m2byte");
    IEM_MC_BEGIN(2, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fcw);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FCW(u16Fcw);
    IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Fcw);
    IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
    return VINF_SUCCESS;
}
13671
13672
/** Opcode 0xd9 0xd0, 0xd9 0xd8-0xdf, ++? - FNOP: does nothing except update
 * the FPU opcode/instruction pointer and perform the usual availability
 * checks. */
FNIEMOP_DEF(iemOp_fnop)
{
    IEMOP_MNEMONIC("fnop");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /** @todo Testcase: looks like FNOP leaves FOP alone but updates FPUIP. Could be
     *        intel optimizations. Investigate. */
    IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
    return VINF_SUCCESS;
}
13690
13691
/** Opcode 0xd9 11/0 stN - FLD ST(i): push a copy of ST(i) onto the stack.
 * An empty source register results in a push underflow. */
FNIEMOP_DEF_1(iemOp_fld_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fld stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, bRm & X86_MODRM_RM_MASK)
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_PUSH_FPU_RESULT(FpuRes);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW();
    IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
13719
13720
/** Opcode 0xd9 11/3 stN - FXCH ST(i): exchange ST(0) and ST(i).
 * The underflow case (either register empty) is handled by a C
 * implementation. */
FNIEMOP_DEF_1(iemOp_fxch_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fxch stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(1, 3);
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value1);
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value2);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_ARG_CONST(uint8_t,           iStReg, /*=*/ bRm & X86_MODRM_RM_MASK, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        IEM_MC_SET_FPU_RESULT(FpuRes, X86_FSW_C1, pr80Value2);
        IEM_MC_STORE_FPUREG_R80_SRC_REF(bRm & X86_MODRM_RM_MASK, pr80Value1);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_CALL_CIMPL_1(iemCImpl_fxch_underflow, iStReg);
    IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
13751
13752
/** Opcode 0xd9 11/4, 0xdd 11/2 - FSTP ST(i): copy ST(0) to ST(i) and pop.
 * The ST(0) destination case is special-cased: it degenerates to a plain
 * pop. */
FNIEMOP_DEF_1(iemOp_fstp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fstp st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* fstp st0, st0 is frequently used as an official 'ffreep st0' sequence. */
    uint8_t const iDstReg = bRm & X86_MODRM_RM_MASK;
    if (!iDstReg)
    {
        /* Destination is ST(0) itself: just pop, updating FSW. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL_CONST(uint16_t,        u16Fsw, /*=*/ 0);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY(0)
            IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
        IEM_MC_ELSE()
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(0);
        IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* Copy ST(0) to ST(i), then pop. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value);
        IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
            IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
            IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, iDstReg);
        IEM_MC_ELSE()
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(iDstReg);
        IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
13799
13800
13801/**
13802 * Common worker for FPU instructions working on ST0 and replaces it with the
13803 * result, i.e. unary operators.
13804 *
13805 * @param pfnAImpl Pointer to the instruction implementation (assembly).
13806 */
13807FNIEMOP_DEF_1(iemOpHlpFpu_st0, PFNIEMAIMPLFPUR80UNARY, pfnAImpl)
13808{
13809 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13810
13811 IEM_MC_BEGIN(2, 1);
13812 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
13813 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
13814 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
13815
13816 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13817 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13818 IEM_MC_PREPARE_FPU_USAGE();
13819 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
13820 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuRes, pr80Value);
13821 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
13822 IEM_MC_ELSE()
13823 IEM_MC_FPU_STACK_UNDERFLOW(0);
13824 IEM_MC_ENDIF();
13825 IEM_MC_ADVANCE_RIP();
13826
13827 IEM_MC_END();
13828 return VINF_SUCCESS;
13829}
13830
13831
/** Opcode 0xd9 0xe0 - FCHS: negate the sign of ST(0). */
FNIEMOP_DEF(iemOp_fchs)
{
    IEMOP_MNEMONIC("fchs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fchs_r80);
}
13838
13839
/** Opcode 0xd9 0xe1 - FABS: clear the sign of ST(0). */
FNIEMOP_DEF(iemOp_fabs)
{
    IEMOP_MNEMONIC("fabs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fabs_r80);
}
13846
13847
13848/**
13849 * Common worker for FPU instructions working on ST0 and only returns FSW.
13850 *
13851 * @param pfnAImpl Pointer to the instruction implementation (assembly).
13852 */
13853FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0, PFNIEMAIMPLFPUR80UNARYFSW, pfnAImpl)
13854{
13855 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13856
13857 IEM_MC_BEGIN(2, 1);
13858 IEM_MC_LOCAL(uint16_t, u16Fsw);
13859 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
13860 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
13861
13862 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13863 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13864 IEM_MC_PREPARE_FPU_USAGE();
13865 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
13866 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pu16Fsw, pr80Value);
13867 IEM_MC_UPDATE_FSW(u16Fsw);
13868 IEM_MC_ELSE()
13869 IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
13870 IEM_MC_ENDIF();
13871 IEM_MC_ADVANCE_RIP();
13872
13873 IEM_MC_END();
13874 return VINF_SUCCESS;
13875}
13876
13877
/** Opcode 0xd9 0xe4 - FTST, examines ST(0) and sets FSW condition codes only. */
FNIEMOP_DEF(iemOp_ftst)
{
    IEMOP_MNEMONIC("ftst st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_ftst_r80);
}
13884
13885
/** Opcode 0xd9 0xe5 - FXAM, examines ST(0) and sets FSW condition codes only. */
FNIEMOP_DEF(iemOp_fxam)
{
    IEMOP_MNEMONIC("fxam st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_fxam_r80);
}
13892
13893
13894/**
13895 * Common worker for FPU instructions pushing a constant onto the FPU stack.
13896 *
13897 * @param pfnAImpl Pointer to the instruction implementation (assembly).
13898 */
13899FNIEMOP_DEF_1(iemOpHlpFpuPushConstant, PFNIEMAIMPLFPUR80LDCONST, pfnAImpl)
13900{
13901 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13902
13903 IEM_MC_BEGIN(1, 1);
13904 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
13905 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
13906
13907 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13908 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13909 IEM_MC_PREPARE_FPU_USAGE();
13910 IEM_MC_IF_FPUREG_IS_EMPTY(7)
13911 IEM_MC_CALL_FPU_AIMPL_1(pfnAImpl, pFpuRes);
13912 IEM_MC_PUSH_FPU_RESULT(FpuRes);
13913 IEM_MC_ELSE()
13914 IEM_MC_FPU_STACK_PUSH_OVERFLOW();
13915 IEM_MC_ENDIF();
13916 IEM_MC_ADVANCE_RIP();
13917
13918 IEM_MC_END();
13919 return VINF_SUCCESS;
13920}
13921
13922
/** Opcode 0xd9 0xe8 - FLD1, pushes a constant onto the FPU stack. */
FNIEMOP_DEF(iemOp_fld1)
{
    IEMOP_MNEMONIC("fld1");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fld1);
}
13929
13930
/** Opcode 0xd9 0xe9 - FLDL2T, pushes a constant onto the FPU stack. */
FNIEMOP_DEF(iemOp_fldl2t)
{
    IEMOP_MNEMONIC("fldl2t");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2t);
}
13937
13938
/** Opcode 0xd9 0xea - FLDL2E, pushes a constant onto the FPU stack. */
FNIEMOP_DEF(iemOp_fldl2e)
{
    IEMOP_MNEMONIC("fldl2e");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2e);
}
13945
/** Opcode 0xd9 0xeb - FLDPI, pushes a constant onto the FPU stack. */
FNIEMOP_DEF(iemOp_fldpi)
{
    IEMOP_MNEMONIC("fldpi");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldpi);
}
13952
13953
/** Opcode 0xd9 0xec - FLDLG2, pushes a constant onto the FPU stack. */
FNIEMOP_DEF(iemOp_fldlg2)
{
    IEMOP_MNEMONIC("fldlg2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldlg2);
}
13960
/** Opcode 0xd9 0xed - FLDLN2, pushes a constant onto the FPU stack. */
FNIEMOP_DEF(iemOp_fldln2)
{
    IEMOP_MNEMONIC("fldln2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldln2);
}
13967
13968
/** Opcode 0xd9 0xee - FLDZ, pushes a constant onto the FPU stack. */
FNIEMOP_DEF(iemOp_fldz)
{
    IEMOP_MNEMONIC("fldz");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldz);
}
13975
13976
/** Opcode 0xd9 0xf0 - F2XM1, unary operation on ST(0). */
FNIEMOP_DEF(iemOp_f2xm1)
{
    IEMOP_MNEMONIC("f2xm1 st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_f2xm1_r80);
}
13983
13984
13985/** Opcode 0xd9 0xf1. */
13986FNIEMOP_DEF(iemOp_fylx2)
13987{
13988 IEMOP_MNEMONIC("fylx2 st0");
13989 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fyl2x_r80);
13990}
13991
13992
13993/**
13994 * Common worker for FPU instructions working on ST0 and having two outputs, one
13995 * replacing ST0 and one pushed onto the stack.
13996 *
13997 * @param pfnAImpl Pointer to the instruction implementation (assembly).
13998 */
13999FNIEMOP_DEF_1(iemOpHlpFpuReplace_st0_push, PFNIEMAIMPLFPUR80UNARYTWO, pfnAImpl)
14000{
14001 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14002
14003 IEM_MC_BEGIN(2, 1);
14004 IEM_MC_LOCAL(IEMFPURESULTTWO, FpuResTwo);
14005 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULTTWO, pFpuResTwo, FpuResTwo, 0);
14006 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
14007
14008 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14009 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14010 IEM_MC_PREPARE_FPU_USAGE();
14011 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
14012 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuResTwo, pr80Value);
14013 IEM_MC_PUSH_FPU_RESULT_TWO(FpuResTwo);
14014 IEM_MC_ELSE()
14015 IEM_MC_FPU_STACK_PUSH_UNDERFLOW_TWO();
14016 IEM_MC_ENDIF();
14017 IEM_MC_ADVANCE_RIP();
14018
14019 IEM_MC_END();
14020 return VINF_SUCCESS;
14021}
14022
14023
/** Opcode 0xd9 0xf2 - FPTAN, replaces ST(0) and pushes a second result. */
FNIEMOP_DEF(iemOp_fptan)
{
    IEMOP_MNEMONIC("fptan st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fptan_r80_r80);
}
14030
14031
14032/**
14033 * Common worker for FPU instructions working on STn and ST0, storing the result
14034 * in STn, and popping the stack unless IE, DE or ZE was raised.
14035 *
14036 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14037 */
14038FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
14039{
14040 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14041
14042 IEM_MC_BEGIN(3, 1);
14043 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14044 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
14045 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14046 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
14047
14048 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14049 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14050
14051 IEM_MC_PREPARE_FPU_USAGE();
14052 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
14053 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
14054 IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, bRm & X86_MODRM_RM_MASK);
14055 IEM_MC_ELSE()
14056 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(bRm & X86_MODRM_RM_MASK);
14057 IEM_MC_ENDIF();
14058 IEM_MC_ADVANCE_RIP();
14059
14060 IEM_MC_END();
14061 return VINF_SUCCESS;
14062}
14063
14064
/** Opcode 0xd9 0xf3 - FPATAN, result to ST(1), then pop. */
FNIEMOP_DEF(iemOp_fpatan)
{
    IEMOP_MNEMONIC("fpatan st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fpatan_r80_by_r80);
}
14071
14072
/** Opcode 0xd9 0xf4 - FXTRACT, replaces ST(0) and pushes a second result. */
FNIEMOP_DEF(iemOp_fxtract)
{
    IEMOP_MNEMONIC("fxtract st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fxtract_r80_r80);
}
14079
14080
/** Opcode 0xd9 0xf5 - FPREM1, ST(0) by ST(1), result to ST(0), no pop. */
FNIEMOP_DEF(iemOp_fprem1)
{
    IEMOP_MNEMONIC("fprem1 st0, st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem1_r80_by_r80);
}
14087
14088
/** Opcode 0xd9 0xf6 - FDECSTP, decrements the FPU stack top pointer. */
FNIEMOP_DEF(iemOp_fdecstp)
{
    IEMOP_MNEMONIC("fdecstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_DEC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0);     /* clears the condition codes (see note above) */

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
14111
14112
/** Opcode 0xd9 0xf7 - FINCSTP, increments the FPU stack top pointer. */
FNIEMOP_DEF(iemOp_fincstp)
{
    IEMOP_MNEMONIC("fincstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0);     /* clears the condition codes (see note above) */

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
14135
14136
/** Opcode 0xd9 0xf8 - FPREM, ST(0) by ST(1), result to ST(0), no pop. */
FNIEMOP_DEF(iemOp_fprem)
{
    IEMOP_MNEMONIC("fprem st0, st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem_r80_by_r80);
}
14143
14144
/** Opcode 0xd9 0xf9 - FYL2XP1, result to ST(1), then pop. */
FNIEMOP_DEF(iemOp_fyl2xp1)
{
    IEMOP_MNEMONIC("fyl2xp1 st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2xp1_r80_by_r80);
}
14151
14152
/** Opcode 0xd9 0xfa - FSQRT, unary operation on ST(0). */
FNIEMOP_DEF(iemOp_fsqrt)
{
    IEMOP_MNEMONIC("fsqrt st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsqrt_r80);
}
14159
14160
/** Opcode 0xd9 0xfb - FSINCOS, replaces ST(0) and pushes a second result. */
FNIEMOP_DEF(iemOp_fsincos)
{
    IEMOP_MNEMONIC("fsincos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fsincos_r80_r80);
}
14167
14168
/** Opcode 0xd9 0xfc - FRNDINT, unary operation on ST(0). */
FNIEMOP_DEF(iemOp_frndint)
{
    IEMOP_MNEMONIC("frndint st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_frndint_r80);
}
14175
14176
/** Opcode 0xd9 0xfd - FSCALE, ST(0) by ST(1), result to ST(0), no pop. */
FNIEMOP_DEF(iemOp_fscale)
{
    IEMOP_MNEMONIC("fscale st0, st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fscale_r80_by_r80);
}
14183
14184
/** Opcode 0xd9 0xfe - FSIN, unary operation on ST(0). */
FNIEMOP_DEF(iemOp_fsin)
{
    IEMOP_MNEMONIC("fsin st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsin_r80);
}
14191
14192
/** Opcode 0xd9 0xff - FCOS, unary operation on ST(0). */
FNIEMOP_DEF(iemOp_fcos)
{
    IEMOP_MNEMONIC("fcos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fcos_r80);
}
14199
14200
/** Used by iemOp_EscF1 to dispatch the 0xd9 register-form opcodes in the
 *  0xe0..0xff range (indexed by ModR/M byte minus 0xe0). */
static const PFNIEMOP g_apfnEscF1_E0toFF[32] =
{
    /* 0xe0 */  iemOp_fchs,
    /* 0xe1 */  iemOp_fabs,
    /* 0xe2 */  iemOp_Invalid,
    /* 0xe3 */  iemOp_Invalid,
    /* 0xe4 */  iemOp_ftst,
    /* 0xe5 */  iemOp_fxam,
    /* 0xe6 */  iemOp_Invalid,
    /* 0xe7 */  iemOp_Invalid,
    /* 0xe8 */  iemOp_fld1,
    /* 0xe9 */  iemOp_fldl2t,
    /* 0xea */  iemOp_fldl2e,
    /* 0xeb */  iemOp_fldpi,
    /* 0xec */  iemOp_fldlg2,
    /* 0xed */  iemOp_fldln2,
    /* 0xee */  iemOp_fldz,
    /* 0xef */  iemOp_Invalid,
    /* 0xf0 */  iemOp_f2xm1,
    /* 0xf1 */  iemOp_fylx2,
    /* 0xf2 */  iemOp_fptan,
    /* 0xf3 */  iemOp_fpatan,
    /* 0xf4 */  iemOp_fxtract,
    /* 0xf5 */  iemOp_fprem1,
    /* 0xf6 */  iemOp_fdecstp,
    /* 0xf7 */  iemOp_fincstp,
    /* 0xf8 */  iemOp_fprem,
    /* 0xf9 */  iemOp_fyl2xp1,
    /* 0xfa */  iemOp_fsqrt,
    /* 0xfb */  iemOp_fsincos,
    /* 0xfc */  iemOp_frndint,
    /* 0xfd */  iemOp_fscale,
    /* 0xfe */  iemOp_fsin,
    /* 0xff */  iemOp_fcos
};
14237
14238
/** Opcode 0xd9 - escape group F1: decodes on ModR/M reg field (memory forms)
 *  or on the whole ModR/M byte (register forms). */
FNIEMOP_DEF(iemOp_EscF1)
{
    /* Record the FPU opcode offset (opcode byte just consumed) for FOP. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register form. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm);
            case 2:
                if (bRm == 0xd0)
                    return FNIEMOP_CALL(iemOp_fnop);
                return IEMOP_RAISE_INVALID_OPCODE();
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved. Intel behavior seems to be FSTP ST(i) though. */
            case 4:
            case 5:
            case 6:
            case 7:
                /* reg >= 4 means bRm is 0xe0..0xff; dispatch via the table. */
                Assert((unsigned)bRm - 0xe0U < RT_ELEMENTS(g_apfnEscF1_E0toFF));
                return FNIEMOP_CALL(g_apfnEscF1_E0toFF[bRm - 0xe0]);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory form. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m32r,  bRm);
            case 1: return IEMOP_RAISE_INVALID_OPCODE();
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m32r,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fldenv,    bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fldcw,     bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fnstenv,   bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstcw,    bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
14280
14281
/** Opcode 0xda 11/0 - FCMOVB: copy ST(N) to ST(0) if CF is set. */
FNIEMOP_DEF_1(iemOp_fcmovb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(N) (referenced) and ST(0) must be non-empty. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14308
14309
/** Opcode 0xda 11/1 - FCMOVE: copy ST(N) to ST(0) if ZF is set. */
FNIEMOP_DEF_1(iemOp_fcmove_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmove st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(N) (referenced) and ST(0) must be non-empty. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14336
14337
/** Opcode 0xda 11/2 - FCMOVBE: copy ST(N) to ST(0) if CF or ZF is set. */
FNIEMOP_DEF_1(iemOp_fcmovbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(N) (referenced) and ST(0) must be non-empty. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14364
14365
/** Opcode 0xda 11/3 - FCMOVU: copy ST(N) to ST(0) if PF is set (unordered). */
FNIEMOP_DEF_1(iemOp_fcmovu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovu st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(N) (referenced) and ST(0) must be non-empty. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14392
14393
14394/**
14395 * Common worker for FPU instructions working on ST0 and STn, only affecting
14396 * flags, and popping twice when done.
14397 *
14398 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14399 */
14400FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
14401{
14402 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14403
14404 IEM_MC_BEGIN(3, 1);
14405 IEM_MC_LOCAL(uint16_t, u16Fsw);
14406 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14407 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14408 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
14409
14410 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14411 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14412
14413 IEM_MC_PREPARE_FPU_USAGE();
14414 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, 1)
14415 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
14416 IEM_MC_UPDATE_FSW_THEN_POP_POP(u16Fsw);
14417 IEM_MC_ELSE()
14418 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP_POP();
14419 IEM_MC_ENDIF();
14420 IEM_MC_ADVANCE_RIP();
14421
14422 IEM_MC_END();
14423 return VINF_SUCCESS;
14424}
14425
14426
/** Opcode 0xda 0xe9 - FUCOMPP, compares ST(0) with ST(1), pops twice. */
FNIEMOP_DEF(iemOp_fucompp)
{
    IEMOP_MNEMONIC("fucompp st0,stN");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fucom_r80_by_r80);
}
14433
14434
14435/**
14436 * Common worker for FPU instructions working on ST0 and an m32i, and storing
14437 * the result in ST0.
14438 *
14439 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14440 */
14441FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32i, uint8_t, bRm, PFNIEMAIMPLFPUI32, pfnAImpl)
14442{
14443 IEM_MC_BEGIN(3, 3);
14444 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14445 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14446 IEM_MC_LOCAL(int32_t, i32Val2);
14447 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
14448 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14449 IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);
14450
14451 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14452 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14453
14454 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14455 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14456 IEM_MC_FETCH_MEM_I32(i32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);
14457
14458 IEM_MC_PREPARE_FPU_USAGE();
14459 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
14460 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi32Val2);
14461 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
14462 IEM_MC_ELSE()
14463 IEM_MC_FPU_STACK_UNDERFLOW(0);
14464 IEM_MC_ENDIF();
14465 IEM_MC_ADVANCE_RIP();
14466
14467 IEM_MC_END();
14468 return VINF_SUCCESS;
14469}
14470
14471
/** Opcode 0xda !11/0 - FIADD m32int, result to ST(0). */
FNIEMOP_DEF_1(iemOp_fiadd_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fiadd m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fiadd_r80_by_i32);
}
14478
14479
/** Opcode 0xda !11/1 - FIMUL m32int, result to ST(0). */
FNIEMOP_DEF_1(iemOp_fimul_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fimul m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fimul_r80_by_i32);
}
14486
14487
/** Opcode 0xda !11/2 - FICOM m32int: compare ST(0) with memory integer,
 *  update FSW only, no pop. */
FNIEMOP_DEF_1(iemOp_ficom_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ficom st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(int32_t,               i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14520
14521
/** Opcode 0xda !11/3 - FICOMP m32int: like FICOM m32int but pops afterwards
 *  (same assembly impl, THEN_POP variants of the FSW/underflow macros). */
FNIEMOP_DEF_1(iemOp_ficomp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ficomp st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(int32_t,               i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14554
14555
/** Opcode 0xda !11/4 - FISUB m32int, result to ST(0). */
FNIEMOP_DEF_1(iemOp_fisub_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisub m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisub_r80_by_i32);
}
14562
14563
/** Opcode 0xda !11/5 - FISUBR m32int (reversed operands), result to ST(0). */
FNIEMOP_DEF_1(iemOp_fisubr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisubr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisubr_r80_by_i32);
}
14570
14571
/** Opcode 0xda !11/6 - FIDIV m32int, result to ST(0). */
FNIEMOP_DEF_1(iemOp_fidiv_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fidiv m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidiv_r80_by_i32);
}
14578
14579
/** Opcode 0xda !11/7 - FIDIVR m32int (reversed operands), result to ST(0). */
FNIEMOP_DEF_1(iemOp_fidivr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fidivr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidivr_r80_by_i32);
}
14586
14587
/** Opcode 0xda - escape group F2: register forms are FCMOVcc/FUCOMPP,
 *  memory forms are the m32int arithmetic/compare instructions. */
FNIEMOP_DEF(iemOp_EscF2)
{
    /* Record the FPU opcode offset (opcode byte just consumed) for FOP. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register form. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovb_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmove_stN,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovu_stN,  bRm);
            case 4: return IEMOP_RAISE_INVALID_OPCODE();
            case 5:
                if (bRm == 0xe9)    /* only 0xda 0xe9 is valid in this row */
                    return FNIEMOP_CALL(iemOp_fucompp);
                return IEMOP_RAISE_INVALID_OPCODE();
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory form. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m32i,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m32i,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m32i,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m32i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m32i,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m32i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m32i,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m32i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
14627
14628
/** Opcode 0xdb !11/0 - FILD m32int: load 32-bit integer from memory and push
 *  it onto the FPU stack as an 80-bit real. */
FNIEMOP_DEF_1(iemOp_fild_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fild m32i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(int32_t,               i32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val,  i32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7)        /* room for a push? */
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i32_to_r80, pFpuRes, pi32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14660
14661
/** Opcode 0xdb !11/1 - FISTTP m32int: store ST(0) to memory as a 32-bit
 *  integer using truncation (iemAImpl_fistt_r80_to_i32), then pop. */
FNIEMOP_DEF_1(iemOp_fisttp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisttp m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int32_t *,               pi32Dst,    1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,  2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing; commit depends on the resulting FSW. */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST(0): if the invalid-op exception is masked, store the
           integer indefinite value; otherwise leave memory untouched. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14696
14697
/** Opcode 0xdb !11/2 - FIST m32int: store ST(0) to memory as a 32-bit
 *  integer (rounding per FCW, iemAImpl_fist_r80_to_i32), no pop. */
FNIEMOP_DEF_1(iemOp_fist_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fist m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int32_t *,               pi32Dst,    1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,  2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing; commit depends on the resulting FSW. */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST(0): if the invalid-op exception is masked, store the
           integer indefinite value; otherwise leave memory untouched. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14732
14733
14734/** Opcode 0xdb !11/3. */
14735FNIEMOP_DEF_1(iemOp_fistp_m32i, uint8_t, bRm)
14736{
14737 IEMOP_MNEMONIC("fisttp m32i");
14738 IEM_MC_BEGIN(3, 2);
14739 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14740 IEM_MC_LOCAL(uint16_t, u16Fsw);
14741 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14742 IEM_MC_ARG(int32_t *, pi32Dst, 1);
14743 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
14744
14745 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
14746 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14747 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14748 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14749
14750 IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
14751 IEM_MC_PREPARE_FPU_USAGE();
14752 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
14753 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
14754 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
14755 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
14756 IEM_MC_ELSE()
14757 IEM_MC_IF_FCW_IM()
14758 IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
14759 IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
14760 IEM_MC_ENDIF();
14761 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
14762 IEM_MC_ENDIF();
14763 IEM_MC_ADVANCE_RIP();
14764
14765 IEM_MC_END();
14766 return VINF_SUCCESS;
14767}
14768
14769
/** Opcode 0xdb !11/5 - FLD m80real: load an 80-bit real from memory and push
 *  it onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fld_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fld m80r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(RTFLOAT80U,            r80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT80U,  pr80Val,    r80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R80(r80Val, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7)        /* room for a push? */
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r80, pFpuRes, pr80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14801
14802
/** Opcode 0xdb !11/7 - FSTP m80real: store ST(0) to memory as an 80-bit real,
 *  then pop. */
FNIEMOP_DEF_1(iemOp_fstp_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fstp m80r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT80U,             pr80Dst,    1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,  2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing; commit depends on the resulting FSW. */
    IEM_MC_MEM_MAP(pr80Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r80, pu16Fsw, pr80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr80Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST(0): if the invalid-op exception is masked, store the
           negative QNaN (real indefinite); otherwise leave memory untouched. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(pr80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr80Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14837
14838
/** Opcode 0xdb 11/0 - FCMOVNB: copy ST(N) to ST(0) if CF is clear. */
FNIEMOP_DEF_1(iemOp_fcmovnb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovnb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(N) (referenced) and ST(0) must be non-empty. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14865
14866
/** Opcode 0xdb 11/1 - FCMOVNE: copy ST(N) to ST(0) if ZF is clear. */
FNIEMOP_DEF_1(iemOp_fcmovne_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovne st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(N) (referenced) and ST(0) must be non-empty. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14893
14894
/** Opcode 0xdb 11/2.
 * FCMOVNBE - copies ST(i) into ST(0) when both EFLAGS.CF and EFLAGS.ZF are
 * clear (not below or equal). */
FNIEMOP_DEF_1(iemOp_fcmovnbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovnbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(0) and ST(i) must be non-empty, else it's a stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        /* FOP/FIP are updated whether or not the move took place. */
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14921
14922
/** Opcode 0xdb 11/3.
 * Copies ST(i) into ST(0) when EFLAGS.PF is clear (not unordered).
 * NOTE(review): Intel documents this encoding as FCMOVNU; the "fcmovnnu"
 * spelling here looks like a local naming quirk - confirm before renaming. */
FNIEMOP_DEF_1(iemOp_fcmovnnu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovnnu st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(0) and ST(i) must be non-empty, else it's a stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        /* FOP/FIP are updated whether or not the move took place. */
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14949
14950
/** Opcode 0xdb 0xe0.
 * FNENI - 8087 enable-interrupts; ignored (no-op) here beyond the
 * device-not-available check. */
FNIEMOP_DEF(iemOp_fneni)
{
    IEMOP_MNEMONIC("fneni (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
14962
14963
/** Opcode 0xdb 0xe1.
 * FNDISI - 8087 disable-interrupts; ignored (no-op) here beyond the
 * device-not-available check. */
FNIEMOP_DEF(iemOp_fndisi)
{
    IEMOP_MNEMONIC("fndisi (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
14975
14976
/** Opcode 0xdb 0xe2.
 * FNCLEX - clears the FPU exception bits in FSW without checking for
 * pending unmasked exceptions first (the "no-wait" form). */
FNIEMOP_DEF(iemOp_fnclex)
{
    IEMOP_MNEMONIC("fnclex");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_CLEAR_FSW_EX();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
14990
14991
/** Opcode 0xdb 0xe3.
 * FNINIT - reinitializes the FPU; deferred to the C implementation with
 * fCheckXcpts=false (no-wait form, does not check pending exceptions). */
FNIEMOP_DEF(iemOp_fninit)
{
    IEMOP_MNEMONIC("fninit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_finit, false /*fCheckXcpts*/);
}
14999
15000
/** Opcode 0xdb 0xe4.
 * FNSETPM - 80287 set-protected-mode; ignored (no-op) on later FPUs. */
FNIEMOP_DEF(iemOp_fnsetpm)
{
    IEMOP_MNEMONIC("fnsetpm (80287/ign)"); /* set protected mode on fpu. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
15012
15013
/** Opcode 0xdb 0xe5.
 * FRSTPM - 80287XL reset-protected-mode; the no-op emulation is disabled
 * because newer CPUs raise \#UD for this encoding, which we mirror. */
FNIEMOP_DEF(iemOp_frstpm)
{
    IEMOP_MNEMONIC("frstpm (80287XL/ign)"); /* reset pm, back to real mode. */
#if 0 /* #UDs on newer CPUs */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
#else
    return IEMOP_RAISE_INVALID_OPCODE();
#endif
}
15029
15030
/** Opcode 0xdb 11/5.
 * FUCOMI - unordered compare of ST(0) with ST(i) setting EFLAGS; no pop. */
FNIEMOP_DEF_1(iemOp_fucomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fucomi st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fucomi_r80_by_r80, false /*fPop*/);
}
15037
15038
/** Opcode 0xdb 11/6.
 * FCOMI - ordered compare of ST(0) with ST(i) setting EFLAGS; no pop. */
FNIEMOP_DEF_1(iemOp_fcomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcomi st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, false /*fPop*/);
}
15045
15046
/** Opcode 0xdb.
 * Escape group F3: dispatches on the ModR/M byte.  Register forms (mod=3)
 * are the FCMOVcc/FUCOMI/FCOMI group plus the 0xe0-0xe7 control encodings;
 * memory forms are the m32i integer and m80r real load/store group. */
FNIEMOP_DEF(iemOp_EscF3)
{
    /* Record the offset of the escape opcode byte for FOP reporting. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovnb_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmovne_stN,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovnbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovnnu_stN, bRm);
            case 4:
                /* reg=4 with mod=3 means bRm is in the 0xe0-0xe7 range. */
                switch (bRm)
                {
                    case 0xe0: return FNIEMOP_CALL(iemOp_fneni);
                    case 0xe1: return FNIEMOP_CALL(iemOp_fndisi);
                    case 0xe2: return FNIEMOP_CALL(iemOp_fnclex);
                    case 0xe3: return FNIEMOP_CALL(iemOp_fninit);
                    case 0xe4: return FNIEMOP_CALL(iemOp_fnsetpm);
                    case 0xe5: return FNIEMOP_CALL(iemOp_frstpm);
                    case 0xe6: return IEMOP_RAISE_INVALID_OPCODE();
                    case 0xe7: return IEMOP_RAISE_INVALID_OPCODE();
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case 5: return FNIEMOP_CALL_1(iemOp_fucomi_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fcomi_stN, bRm);
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fild_m32i,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m32i,bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fist_m32i,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fistp_m32i, bRm);
            case 4: return IEMOP_RAISE_INVALID_OPCODE();
            case 5: return FNIEMOP_CALL_1(iemOp_fld_m80r,   bRm);
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return FNIEMOP_CALL_1(iemOp_fstp_m80r,  bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
15096
15097
15098/**
15099 * Common worker for FPU instructions working on STn and ST0, and storing the
15100 * result in STn unless IE, DE or ZE was raised.
15101 *
15102 * @param pfnAImpl Pointer to the instruction implementation (assembly).
15103 */
15104FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
15105{
15106 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15107
15108 IEM_MC_BEGIN(3, 1);
15109 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
15110 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
15111 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
15112 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
15113
15114 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15115 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15116
15117 IEM_MC_PREPARE_FPU_USAGE();
15118 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
15119 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
15120 IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
15121 IEM_MC_ELSE()
15122 IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
15123 IEM_MC_ENDIF();
15124 IEM_MC_ADVANCE_RIP();
15125
15126 IEM_MC_END();
15127 return VINF_SUCCESS;
15128}
15129
15130
/** Opcode 0xdc 11/0.
 * FADD ST(i),ST(0) - adds ST(0) to ST(i), storing the result in ST(i). */
FNIEMOP_DEF_1(iemOp_fadd_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fadd stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fadd_r80_by_r80);
}
15137
15138
/** Opcode 0xdc 11/1.
 * FMUL ST(i),ST(0) - multiplies ST(i) by ST(0), storing the result in ST(i). */
FNIEMOP_DEF_1(iemOp_fmul_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fmul stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fmul_r80_by_r80);
}
15145
15146
/** Opcode 0xdc 11/4.
 * FSUBR ST(i),ST(0) - reverse subtract, storing the result in ST(i). */
FNIEMOP_DEF_1(iemOp_fsubr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsubr_r80_by_r80);
}
15153
15154
/** Opcode 0xdc 11/5.
 * FSUB ST(i),ST(0) - subtract, storing the result in ST(i). */
FNIEMOP_DEF_1(iemOp_fsub_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsub stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsub_r80_by_r80);
}
15161
15162
/** Opcode 0xdc 11/6.
 * FDIVR ST(i),ST(0) - reverse divide, storing the result in ST(i). */
FNIEMOP_DEF_1(iemOp_fdivr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdivr_r80_by_r80);
}
15169
15170
/** Opcode 0xdc 11/7.
 * FDIV ST(i),ST(0) - divide, storing the result in ST(i). */
FNIEMOP_DEF_1(iemOp_fdiv_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdiv stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdiv_r80_by_r80);
}
15177
15178
15179/**
15180 * Common worker for FPU instructions working on ST0 and a 64-bit floating point
15181 * memory operand, and storing the result in ST0.
15182 *
15183 * @param pfnAImpl Pointer to the instruction implementation (assembly).
15184 */
15185FNIEMOP_DEF_2(iemOpHlpFpu_ST0_m64r, uint8_t, bRm, PFNIEMAIMPLFPUR64, pfnImpl)
15186{
15187 IEM_MC_BEGIN(3, 3);
15188 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15189 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
15190 IEM_MC_LOCAL(RTFLOAT64U, r64Factor2);
15191 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
15192 IEM_MC_ARG(PCRTFLOAT80U, pr80Factor1, 1);
15193 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Factor2, r64Factor2, 2);
15194
15195 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15196 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15197 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15198 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15199
15200 IEM_MC_FETCH_MEM_R64(r64Factor2, pIemCpu->iEffSeg, GCPtrEffSrc);
15201 IEM_MC_PREPARE_FPU_USAGE();
15202 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Factor1, 0)
15203 IEM_MC_CALL_FPU_AIMPL_3(pfnImpl, pFpuRes, pr80Factor1, pr64Factor2);
15204 IEM_MC_STORE_FPU_RESULT_MEM_OP(FpuRes, 0, pIemCpu->iEffSeg, GCPtrEffSrc);
15205 IEM_MC_ELSE()
15206 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(0, pIemCpu->iEffSeg, GCPtrEffSrc);
15207 IEM_MC_ENDIF();
15208 IEM_MC_ADVANCE_RIP();
15209
15210 IEM_MC_END();
15211 return VINF_SUCCESS;
15212}
15213
15214
/** Opcode 0xdc !11/0.
 * FADD ST(0),m64real - adds the 64-bit memory operand to ST(0). */
FNIEMOP_DEF_1(iemOp_fadd_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fadd m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fadd_r80_by_r64);
}
15221
15222
/** Opcode 0xdc !11/1.
 * FMUL ST(0),m64real - multiplies ST(0) by the 64-bit memory operand. */
FNIEMOP_DEF_1(iemOp_fmul_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fmul m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fmul_r80_by_r64);
}
15229
15230
/** Opcode 0xdc !11/2.
 * FCOM ST(0),m64real - compares ST(0) with the 64-bit memory operand and
 * updates FSW; no pop. */
FNIEMOP_DEF_1(iemOp_fcom_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcom st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U,            r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U,  pr64Val2,   r64Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15263
15264
/** Opcode 0xdc !11/3.
 * FCOMP ST(0),m64real - same compare as FCOM m64r, but pops ST(0) after
 * updating FSW. */
FNIEMOP_DEF_1(iemOp_fcomp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcomp st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U,            r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U,  pr64Val2,   r64Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15297
15298
/** Opcode 0xdc !11/4.
 * FSUB ST(0),m64real - subtracts the 64-bit memory operand from ST(0). */
FNIEMOP_DEF_1(iemOp_fsub_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsub m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsub_r80_by_r64);
}
15305
15306
/** Opcode 0xdc !11/5.
 * FSUBR ST(0),m64real - reverse subtract with a 64-bit memory operand. */
FNIEMOP_DEF_1(iemOp_fsubr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsubr_r80_by_r64);
}
15313
15314
/** Opcode 0xdc !11/6.
 * FDIV ST(0),m64real - divides ST(0) by the 64-bit memory operand. */
FNIEMOP_DEF_1(iemOp_fdiv_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdiv m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdiv_r80_by_r64);
}
15321
15322
/** Opcode 0xdc !11/7.
 * FDIVR ST(0),m64real - reverse divide with a 64-bit memory operand. */
FNIEMOP_DEF_1(iemOp_fdivr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdivr_r80_by_r64);
}
15329
15330
/** Opcode 0xdc.
 * Escape group F4: register forms operate on ST(i) with ST(0) as source;
 * memory forms operate on ST(0) with a 64-bit real memory operand. */
FNIEMOP_DEF(iemOp_EscF4)
{
    /* Record the offset of the escape opcode byte for FOP reporting. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN_st0,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN_st0,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN,      bRm); /* Marked reserved, intel behavior is that of FCOM ST(i). */
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN,     bRm); /* Marked reserved, intel behavior is that of FCOMP ST(i). */
            case 4: return FNIEMOP_CALL_1(iemOp_fsubr_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsub_stN_st0,  bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivr_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdiv_stN_st0,  bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m64r,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m64r,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m64r,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m64r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m64r,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m64r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m64r,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m64r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
15367
15368
/** Opcode 0xdd !11/0.
 * FLD m64real - converts the 64-bit memory operand to 80-bit and pushes it
 * onto the FPU stack; ST(7) must be empty or a push overflow is signalled.
 * @sa iemOp_fld_m32r */
FNIEMOP_DEF_1(iemOp_fld_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fld m64r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U,            r64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U,  pr64Val,    r64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Val, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    /* The push target (current ST(7)) must be free. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r64_to_r80, pFpuRes, pr64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15400
15401
/** Opcode 0xdd !11/1.  (Comment previously said !11/0; dispatch in
 * iemOp_EscF5 routes reg=1 here.)
 * FISTTP m64int - stores ST(0) to memory as a truncated 64-bit integer and
 * pops the stack.  On empty ST(0) and IM masked, stores the integer
 * indefinite value (INT64_MIN). */
FNIEMOP_DEF_1(iemOp_fisttp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisttp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int64_t *,               pi64Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination before the FPU call so write faults are raised
       before the register stack is modified. */
    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15436
15437
/** Opcode 0xdd !11/2.  (Comment previously said !11/0; dispatch in
 * iemOp_EscF5 routes reg=2 here.)
 * FST m64real - stores ST(0) to memory as 64-bit real; no pop.  On empty
 * ST(0) and IM masked, stores a negative QNaN. */
FNIEMOP_DEF_1(iemOp_fst_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fst m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U,             pr64Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination before the FPU call so write faults are raised
       before FSW is updated. */
    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15472
15473
15474
15475
/** Opcode 0xdd !11/3.  (Comment previously said !11/0; dispatch in
 * iemOp_EscF5 routes reg=3 here.)
 * FSTP m64real - same as FST m64r but pops the stack after storing. */
FNIEMOP_DEF_1(iemOp_fstp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fstp m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U,             pr64Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination before the FPU call so write faults are raised
       before the register stack is popped. */
    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15510
15511
/** Opcode 0xdd !11/4.  (Comment previously said !11/0; dispatch in
 * iemOp_EscF5 routes reg=4 here.)
 * FRSTOR m94/108byte - restores the full FPU state from memory; deferred
 * to the C implementation. */
FNIEMOP_DEF_1(iemOp_frstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC("frstor m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE,           enmEffOpSize, /*=*/ pIemCpu->enmEffOpSize,  0);
    IEM_MC_ARG(uint8_t,                 iEffSeg,                                    1);
    IEM_MC_ARG(RTGCPTR,                 GCPtrEffSrc,                                2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_frstor, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
15528
15529
/** Opcode 0xdd !11/6.  (Comment previously said !11/0; dispatch in
 * iemOp_EscF5 routes reg=6 here.)
 * FNSAVE m94/108byte - saves the full FPU state to memory (no-wait form);
 * deferred to the C implementation. */
FNIEMOP_DEF_1(iemOp_fnsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fnsave m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE,           enmEffOpSize, /*=*/ pIemCpu->enmEffOpSize,  0);
    IEM_MC_ARG(uint8_t,                 iEffSeg,                                    1);
    IEM_MC_ARG(RTGCPTR,                 GCPtrEffDst,                                2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fnsave, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;

}
15547
/** Opcode 0xdd !11/7.  (Comment previously said !11/0; dispatch in
 * iemOp_EscF5 routes reg=7 here.)
 * FNSTSW m16 - stores the FPU status word to memory (no-wait form). */
FNIEMOP_DEF_1(iemOp_fnstsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fnstsw m16");

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Tmp);
    IEM_MC_ADVANCE_RIP();

/** @todo Debug / drop a hint to the verifier that things may differ
 * from REM. Seen 0x4020 (iem) vs 0x4000 (rem) at 0008:801c6b88 booting
 * NT4SP1. (X86_FSW_PE) */
    IEM_MC_END();
    return VINF_SUCCESS;
}
15572
15573
/** Opcode 0xdd 11/0.
 * FFREE ST(i) - marks ST(i) as empty; condition codes left untouched. */
FNIEMOP_DEF_1(iemOp_ffree_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ffree stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C1, C2 and C3 are documented as undefined, we leave the
             unmodified. */

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
15595
15596
/** Opcode 0xdd 11/2.  (Comment previously said 11/1; dispatch in
 * iemOp_EscF5 routes reg=2 here.)
 * FST ST(i) - copies ST(0) into ST(i); no pop. */
FNIEMOP_DEF_1(iemOp_fst_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fst st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        /* Wrap the ST(0) value in a result with a zero FSW delta. */
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
    IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
15621
15622
15623/** Opcode 0xdd 11/3. */
15624FNIEMOP_DEF_1(iemOp_fucom_stN_st0, uint8_t, bRm)
15625{
15626 IEMOP_MNEMONIC("fcom st0,stN");
15627 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fucom_r80_by_r80);
15628}
15629
15630
15631/** Opcode 0xdd 11/4. */
15632FNIEMOP_DEF_1(iemOp_fucomp_stN, uint8_t, bRm)
15633{
15634 IEMOP_MNEMONIC("fcomp st0,stN");
15635 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fucom_r80_by_r80);
15636}
15637
15638
/** Opcode 0xdd.
 * Escape group F5: register forms are FFREE/FST/FSTP/FUCOM(P); memory forms
 * are the m64 real/integer store group plus FRSTOR/FNSAVE/FNSTSW. */
FNIEMOP_DEF(iemOp_EscF5)
{
    /* Record the offset of the escape opcode byte for FOP reporting. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffree_stN,   bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN,    bRm); /* Reserved, intel behavior is that of XCHG ST(i). */
            case 2: return FNIEMOP_CALL_1(iemOp_fst_stN,     bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN,    bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fucom_stN_st0,bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fucomp_stN,  bRm);
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m64r,    bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m64i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m64r,    bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m64r,   bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_frstor,      bRm);
            case 5: return IEMOP_RAISE_INVALID_OPCODE();
            case 6: return FNIEMOP_CALL_1(iemOp_fnsave,      bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstsw,      bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
15675
15676
/** Opcode 0xde 11/0.
 * FADDP ST(i),ST(0) - adds ST(0) to ST(i), stores in ST(i), then pops. */
FNIEMOP_DEF_1(iemOp_faddp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("faddp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fadd_r80_by_r80);
}
15683
15684
/** Opcode 0xde 11/1.  (Comment previously said 11/0.)
 * FMULP ST(i),ST(0) - multiplies ST(i) by ST(0), stores in ST(i), then pops. */
FNIEMOP_DEF_1(iemOp_fmulp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fmulp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fmul_r80_by_r80);
}
15691
15692
15693/** Opcode 0xde 0xd9. */
15694FNIEMOP_DEF(iemOp_fcompp)
15695{
15696 IEMOP_MNEMONIC("fucompp st0,stN");
15697 return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fcom_r80_by_r80);
15698}
15699
15700
/** Opcode 0xde 11/4.
 * FSUBRP ST(i),ST(0) - reverse subtract into ST(i), then pops. */
FNIEMOP_DEF_1(iemOp_fsubrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsubr_r80_by_r80);
}
15707
15708
/** Opcode 0xde 11/5.
 * FSUBP ST(i),ST(0) - subtract into ST(i), then pops. */
FNIEMOP_DEF_1(iemOp_fsubp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsub_r80_by_r80);
}
15715
15716
/** Opcode 0xde 11/6.
 * FDIVRP ST(i),ST(0) - reverse divide into ST(i), then pops. */
FNIEMOP_DEF_1(iemOp_fdivrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdivr_r80_by_r80);
}
15723
15724
/** Opcode 0xde 11/7.
 * FDIVP ST(i),ST(0) - divide into ST(i), then pops. */
FNIEMOP_DEF_1(iemOp_fdivp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdiv_r80_by_r80);
}
15731
15732
15733/**
15734 * Common worker for FPU instructions working on ST0 and an m16i, and storing
15735 * the result in ST0.
15736 *
15737 * @param pfnAImpl Pointer to the instruction implementation (assembly).
15738 */
15739FNIEMOP_DEF_2(iemOpHlpFpu_st0_m16i, uint8_t, bRm, PFNIEMAIMPLFPUI16, pfnAImpl)
15740{
15741 IEM_MC_BEGIN(3, 3);
15742 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15743 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
15744 IEM_MC_LOCAL(int16_t, i16Val2);
15745 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
15746 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
15747 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);
15748
15749 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15750 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15751
15752 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15753 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15754 IEM_MC_FETCH_MEM_I16(i16Val2, pIemCpu->iEffSeg, GCPtrEffSrc);
15755
15756 IEM_MC_PREPARE_FPU_USAGE();
15757 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
15758 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi16Val2);
15759 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
15760 IEM_MC_ELSE()
15761 IEM_MC_FPU_STACK_UNDERFLOW(0);
15762 IEM_MC_ENDIF();
15763 IEM_MC_ADVANCE_RIP();
15764
15765 IEM_MC_END();
15766 return VINF_SUCCESS;
15767}
15768
15769
/** Opcode 0xde !11/0.
 * FIADD m16int - adds the 16-bit integer memory operand to ST(0). */
FNIEMOP_DEF_1(iemOp_fiadd_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fiadd m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fiadd_r80_by_i16);
}
15776
15777
/** Opcode 0xde !11/1.
 * FIMUL m16int - multiplies ST(0) by the 16-bit integer memory operand. */
FNIEMOP_DEF_1(iemOp_fimul_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fimul m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fimul_r80_by_i16);
}
15784
15785
/** Opcode 0xde !11/2.
 * FICOM ST(0),m16int - compares ST(0) with the 16-bit integer memory
 * operand and updates FSW; no pop. */
FNIEMOP_DEF_1(iemOp_ficom_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ficom st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(int16_t,               i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15818
15819
/** Opcode 0xde !11/3.
 * FICOMP ST(0),m16int - same compare as FICOM m16i, but pops ST(0) after
 * updating FSW. */
FNIEMOP_DEF_1(iemOp_ficomp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ficomp st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(int16_t,               i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15852
15853
/** Opcode 0xde !11/4.
 * FISUB m16int: ST(0) = ST(0) - (signed 16-bit memory integer). */
FNIEMOP_DEF_1(iemOp_fisub_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisub m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisub_r80_by_i16);
}
15860
15861
/** Opcode 0xde !11/5.
 * FISUBR m16int: reversed subtract, ST(0) = (signed 16-bit memory integer) - ST(0). */
FNIEMOP_DEF_1(iemOp_fisubr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisubr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisubr_r80_by_i16);
}
15868
15869
15870/** Opcode 0xde !11/6. */
15871FNIEMOP_DEF_1(iemOp_fidiv_m16i, uint8_t, bRm)
15872{
15873 IEMOP_MNEMONIC("fiadd m16i");
15874 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidiv_r80_by_i16);
15875}
15876
15877
15878/** Opcode 0xde !11/7. */
15879FNIEMOP_DEF_1(iemOp_fidivr_m16i, uint8_t, bRm)
15880{
15881 IEMOP_MNEMONIC("fiadd m16i");
15882 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidivr_r80_by_i16);
15883}
15884
15885
/** Opcode 0xde.
 * FPU escape dispatcher: register forms (mod==3) are the *P arithmetic ops and
 * FCOMPP; memory forms are the 16-bit integer arithmetic ops (FIADD..FIDIVR). */
FNIEMOP_DEF(iemOp_EscF6)
{
    /* Remember the offset of the 0xde escape byte for x87 FOP tracking. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_faddp_stN_st0, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmulp_stN_st0, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 3: if (bRm == 0xd9) /* FCOMPP is the single encoding DE D9. */
                        return FNIEMOP_CALL(iemOp_fcompp);
                    return IEMOP_RAISE_INVALID_OPCODE();
            case 4: return FNIEMOP_CALL_1(iemOp_fsubrp_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubp_stN_st0, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivrp_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivp_stN_st0, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m16i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m16i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m16i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m16i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m16i, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m16i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m16i, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m16i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
15924
15925
/** Opcode 0xdf 11/0.
 * Undocumented instruction, assumed to work like ffree + fincstp:
 * mark ST(i) empty, then pop the register stack. */
FNIEMOP_DEF_1(iemOp_ffreep_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ffreep stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK); /* R/M bits select ST(i). */
    IEM_MC_FPU_STACK_INC_TOP();                     /* The "pop" half of ffreep. */
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
15947
15948
/** Opcode 0xdf 0xe0.
 * FNSTSW AX: copy the FPU status word into AX without checking for pending
 * FPU exceptions first (the no-wait form, hence only #NM may be raised). */
FNIEMOP_DEF(iemOp_fnstsw_ax)
{
    IEMOP_MNEMONIC("fnstsw ax");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
15965
15966
15967/** Opcode 0xdf 11/5. */
15968FNIEMOP_DEF_1(iemOp_fucomip_st0_stN, uint8_t, bRm)
15969{
15970 IEMOP_MNEMONIC("fcomip st0,stN");
15971 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
15972}
15973
15974
/** Opcode 0xdf 11/6.
 * FCOMIP ST0,ST(i): ordered compare setting ZF/PF/CF in EFLAGS, then pop. */
FNIEMOP_DEF_1(iemOp_fcomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcomip st0,stN");
    /* Shared C implementation handles both FCOMI(P)/FUCOMI(P); fPop=true here. */
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
}
15981
15982
/** Opcode 0xdf !11/0.
 * FILD m16int: convert a signed 16-bit memory integer to 80-bit real and push
 * it onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fild_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fild m16i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int16_t, i16Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val, i16Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* A push lands in ST(7) relative to the current top; it must be empty. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i16_to_r80, pFpuRes, pi16Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
16014
16015
/** Opcode 0xdf !11/1.
 * FISTTP m16int (SSE3): store ST(0) to memory as a signed 16-bit integer using
 * truncation (chop) regardless of the rounding control, then pop. */
FNIEMOP_DEF_1(iemOp_fisttp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisttp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing up front so #PF is raised before any
       FPU state is modified. */
    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: if #IA is masked, store the integer indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
16050
16051
16052/** Opcode 0xdf !11/2. */
16053FNIEMOP_DEF_1(iemOp_fist_m16i, uint8_t, bRm)
16054{
16055 IEMOP_MNEMONIC("fistp m16i");
16056 IEM_MC_BEGIN(3, 2);
16057 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
16058 IEM_MC_LOCAL(uint16_t, u16Fsw);
16059 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
16060 IEM_MC_ARG(int16_t *, pi16Dst, 1);
16061 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
16062
16063 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
16064 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16065 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16066 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16067
16068 IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
16069 IEM_MC_PREPARE_FPU_USAGE();
16070 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
16071 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
16072 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
16073 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
16074 IEM_MC_ELSE()
16075 IEM_MC_IF_FCW_IM()
16076 IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
16077 IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
16078 IEM_MC_ENDIF();
16079 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
16080 IEM_MC_ENDIF();
16081 IEM_MC_ADVANCE_RIP();
16082
16083 IEM_MC_END();
16084 return VINF_SUCCESS;
16085}
16086
16087
/** Opcode 0xdf !11/3.
 * FISTP m16int: store ST(0) to memory as a signed 16-bit integer (rounded per
 * FCW.RC), then pop the FPU stack. */
FNIEMOP_DEF_1(iemOp_fistp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fistp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination first so a #PF fires before touching FPU state. */
    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: if #IA is masked, store the integer indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
16122
16123
/** Opcode 0xdf !11/4.
 * FBLD m80bcd -- not yet implemented; FNIEMOP_STUB_1 supplies the placeholder
 * behavior (see its definition for what the stub does when hit). */
FNIEMOP_STUB_1(iemOp_fbld_m80d, uint8_t, bRm);
16126
16127
/** Opcode 0xdf !11/5.
 * FILD m64int: convert a signed 64-bit memory integer to 80-bit real and push
 * it onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fild_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fild m64i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int64_t, i64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int64_t const *, pi64Val, i64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I64(i64Val, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* A push lands in ST(7) relative to the current top; it must be empty. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i64_to_r80, pFpuRes, pi64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
16159
16160
/** Opcode 0xdf !11/6.
 * FBSTP m80bcd -- not yet implemented; FNIEMOP_STUB_1 supplies the placeholder
 * behavior (see its definition for what the stub does when hit). */
FNIEMOP_STUB_1(iemOp_fbstp_m80d, uint8_t, bRm);
16163
16164
/** Opcode 0xdf !11/7.
 * FISTP m64int: store ST(0) to memory as a signed 64-bit integer (rounded per
 * FCW.RC), then pop the FPU stack. */
FNIEMOP_DEF_1(iemOp_fistp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fistp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination first so a #PF fires before touching FPU state. */
    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: if #IA is masked, store the integer indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
16199
16200
16201/** Opcode 0xdf. */
16202FNIEMOP_DEF(iemOp_EscF7)
16203{
16204 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
16205 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
16206 {
16207 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
16208 {
16209 case 0: return FNIEMOP_CALL_1(iemOp_ffreep_stN, bRm); /* ffree + pop afterwards, since forever according to AMD. */
16210 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, behaves like FXCH ST(i) on intel. */
16211 case 2: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
16212 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
16213 case 4: if (bRm == 0xe0)
16214 return FNIEMOP_CALL(iemOp_fnstsw_ax);
16215 return IEMOP_RAISE_INVALID_OPCODE();
16216 case 5: return FNIEMOP_CALL_1(iemOp_fucomip_st0_stN, bRm);
16217 case 6: return FNIEMOP_CALL_1(iemOp_fcomip_st0_stN, bRm);
16218 case 7: return IEMOP_RAISE_INVALID_OPCODE();
16219 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16220 }
16221 }
16222 else
16223 {
16224 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
16225 {
16226 case 0: return FNIEMOP_CALL_1(iemOp_fild_m16i, bRm);
16227 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m16i, bRm);
16228 case 2: return FNIEMOP_CALL_1(iemOp_fist_m16i, bRm);
16229 case 3: return FNIEMOP_CALL_1(iemOp_fistp_m16i, bRm);
16230 case 4: return FNIEMOP_CALL_1(iemOp_fbld_m80d, bRm);
16231 case 5: return FNIEMOP_CALL_1(iemOp_fild_m64i, bRm);
16232 case 6: return FNIEMOP_CALL_1(iemOp_fbstp_m80d, bRm);
16233 case 7: return FNIEMOP_CALL_1(iemOp_fistp_m64i, bRm);
16234 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16235 }
16236 }
16237}
16238
16239
/** Opcode 0xe0.
 * LOOPNE/LOOPNZ rel8: decrement xCX (width = effective address size) and jump
 * if the counter is non-zero AND ZF is clear. */
FNIEMOP_DEF(iemOp_loopne_Jb)
{
    IEMOP_MNEMONIC("loopne Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* The address-size prefix selects whether CX, ECX or RCX is the counter. */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16286
16287
/** Opcode 0xe1.
 * LOOPE/LOOPZ rel8: decrement xCX (width = effective address size) and jump
 * if the counter is non-zero AND ZF is set. */
FNIEMOP_DEF(iemOp_loope_Jb)
{
    IEMOP_MNEMONIC("loope Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* The address-size prefix selects whether CX, ECX or RCX is the counter. */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16334
16335
/** Opcode 0xe2.
 * LOOP rel8: decrement xCX (width = effective address size) and jump while
 * the counter is non-zero.  A self-referencing LOOP (target == the LOOP
 * instruction itself) is detected at decode time and short-circuited by
 * zeroing the counter, avoiding iterating the emulator xCX times. */
FNIEMOP_DEF(iemOp_loop_Jb)
{
    IEMOP_MNEMONIC("loop Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /** @todo Check out the #GP case if EIP < CS.Base or EIP > CS.Limit when
     * using the 32-bit operand size override.  How can that be restarted?  See
     * weird pseudo code in intel manual. */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            /* -offOpcode == i8Imm means the branch targets this very LOOP. */
            if (-(int8_t)pIemCpu->offOpcode != i8Imm)
            {
                IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
                IEM_MC_IF_CX_IS_NZ() {
                    IEM_MC_REL_JMP_S8(i8Imm);
                } IEM_MC_ELSE() {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ENDIF();
            }
            else
            {
                /* Tight self-loop: equivalent final state is CX=0, fall through. */
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP();
            }
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            if (-(int8_t)pIemCpu->offOpcode != i8Imm)
            {
                IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
                IEM_MC_IF_ECX_IS_NZ() {
                    IEM_MC_REL_JMP_S8(i8Imm);
                } IEM_MC_ELSE() {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ENDIF();
            }
            else
            {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP();
            }
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            if (-(int8_t)pIemCpu->offOpcode != i8Imm)
            {
                IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
                IEM_MC_IF_RCX_IS_NZ() {
                    IEM_MC_REL_JMP_S8(i8Imm);
                } IEM_MC_ELSE() {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ENDIF();
            }
            else
            {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP();
            }
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16409
16410
/** Opcode 0xe3.
 * JCXZ/JECXZ/JRCXZ rel8: jump if the counter register is zero; which register
 * is tested (CX/ECX/RCX) follows the effective address size, not operand size. */
FNIEMOP_DEF(iemOp_jecxz_Jb)
{
    IEMOP_MNEMONIC("jecxz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            /* Branch is taken on the ELSE (counter == 0) side. */
            IEM_MC_IF_CX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_ECX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_RCX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16454
16455
16456/** Opcode 0xe4 */
16457FNIEMOP_DEF(iemOp_in_AL_Ib)
16458{
16459 IEMOP_MNEMONIC("in eAX,Ib");
16460 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
16461 IEMOP_HLP_NO_LOCK_PREFIX();
16462 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, 1);
16463}
16464
16465
/** Opcode 0xe5
 * IN eAX,imm8: read a word or dword (per effective operand size) from the
 * immediate I/O port into AX/EAX. */
FNIEMOP_DEF(iemOp_in_eAX_Ib)
{
    IEMOP_MNEMONIC("in eAX,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, pIemCpu->enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
16474
16475
/** Opcode 0xe6
 * OUT imm8,AL: write AL to the immediate I/O port. */
FNIEMOP_DEF(iemOp_out_Ib_AL)
{
    IEMOP_MNEMONIC("out Ib,AL");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, 1);
}
16484
16485
/** Opcode 0xe7
 * OUT imm8,eAX: write AX/EAX (per effective operand size) to the immediate
 * I/O port. */
FNIEMOP_DEF(iemOp_out_Ib_eAX)
{
    IEMOP_MNEMONIC("out Ib,eAX");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, pIemCpu->enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
16494
16495
/** Opcode 0xe8.
 * CALL rel16/rel32: near relative call.  In 64-bit mode the operand size
 * defaults to 64 bits and the rel32 immediate is sign-extended. */
FNIEMOP_DEF(iemOp_call_Jv)
{
    IEMOP_MNEMONIC("call Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            /* Cast reinterprets the raw immediate as a signed displacement. */
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_16, (int16_t)u16Imm);
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_32, (int32_t)u32Imm);
        }

        case IEMMODE_64BIT:
        {
            /* rel32 sign-extended to 64 bits by the fetch macro. */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_64, u64Imm);
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16524
16525
/** Opcode 0xe9.
 * JMP rel16/rel32: near relative jump.  The 64-bit case shares the rel32 path
 * (the displacement is always 32 bits, sign-extended by IEM_MC_REL_JMP_S32). */
FNIEMOP_DEF(iemOp_jmp_Jv)
{
    IEMOP_MNEMONIC("jmp Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
            IEM_MC_BEGIN(0, 0);
            IEM_MC_REL_JMP_S16(i16Imm);
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        case IEMMODE_32BIT:
        {
            int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
            IEM_MC_BEGIN(0, 0);
            IEM_MC_REL_JMP_S32(i32Imm);
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16555
16556
/** Opcode 0xea.
 * JMP ptr16:16/ptr16:32: direct far jump.  Invalid in 64-bit mode (#UD). */
FNIEMOP_DEF(iemOp_jmp_Ap)
{
    IEMOP_MNEMONIC("jmp Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    uint32_t offSeg;
    if (pIemCpu->enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&offSeg);
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
    /* The selector always follows the offset in the instruction encoding. */
    uint16_t uSel;  IEM_OPCODE_GET_NEXT_U16(&uSel);
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_FarJmp, uSel, offSeg, pIemCpu->enmEffOpSize);
}
16573
16574
/** Opcode 0xeb.
 * JMP rel8: short relative jump. */
FNIEMOP_DEF(iemOp_jmp_Jb)
{
    IEMOP_MNEMONIC("jmp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_REL_JMP_S8(i8Imm);
    IEM_MC_END();
    return VINF_SUCCESS;
}
16588
16589
/** Opcode 0xec
 * IN AL,DX: read one byte from the I/O port in DX into AL. */
FNIEMOP_DEF(iemOp_in_AL_DX)
{
    IEMOP_MNEMONIC("in AL,DX");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, 1);
}
16597
16598
/** Opcode 0xed
 * IN eAX,DX: read a word or dword (per effective operand size) from the I/O
 * port in DX into AX/EAX.
 * NOTE(review): function name lacks the "in_" prefix its siblings use
 * (iemOp_in_AL_DX etc.); kept as-is since it is referenced externally. */
FNIEMOP_DEF(iemOp_eAX_DX)
{
    IEMOP_MNEMONIC("in eAX,DX");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, pIemCpu->enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
16606
16607
/** Opcode 0xee
 * OUT DX,AL: write AL to the I/O port in DX. */
FNIEMOP_DEF(iemOp_out_DX_AL)
{
    IEMOP_MNEMONIC("out DX,AL");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, 1);
}
16615
16616
/** Opcode 0xef
 * OUT DX,eAX: write AX/EAX (per effective operand size) to the I/O port in DX. */
FNIEMOP_DEF(iemOp_out_DX_eAX)
{
    IEMOP_MNEMONIC("out DX,eAX");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, pIemCpu->enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
16624
16625
/** Opcode 0xf0.
 * LOCK prefix: record the prefix and recursively decode the next opcode byte
 * through the one-byte map.  Whether LOCK is actually legal is checked by the
 * instruction handler that eventually consumes it. */
FNIEMOP_DEF(iemOp_lock)
{
    /* A prefix invalidates any REX prefix that came before it. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("lock");
    pIemCpu->fPrefixes |= IEM_OP_PRF_LOCK;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
16635
16636
/** Opcode 0xf1.
 * INT1/ICEBP: raise a #DB via the generic software-interrupt implementation;
 * fIsBpInstr=false so it is not treated like INT3. */
FNIEMOP_DEF(iemOp_int_1)
{
    IEMOP_MNEMONIC("int1"); /* icebp */
    IEMOP_HLP_MIN_386(); /** @todo does not generate #UD on 286, or so they say... */
    /** @todo testcase! */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_DB, false /*fIsBpInstr*/);
}
16645
16646
/** Opcode 0xf2.
 * REPNE/REPNZ prefix: record the prefix and recursively decode the next
 * opcode byte.  REPNE and REPE are mutually exclusive; the later one wins. */
FNIEMOP_DEF(iemOp_repne)
{
    /* This overrides any previous REPE prefix. */
    pIemCpu->fPrefixes &= ~IEM_OP_PRF_REPZ;
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repne");
    pIemCpu->fPrefixes |= IEM_OP_PRF_REPNZ;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
16658
16659
/** Opcode 0xf3.
 * REP/REPE/REPZ prefix: record the prefix and recursively decode the next
 * opcode byte.  REPE and REPNE are mutually exclusive; the later one wins. */
FNIEMOP_DEF(iemOp_repe)
{
    /* This overrides any previous REPNE prefix. */
    pIemCpu->fPrefixes &= ~IEM_OP_PRF_REPNZ;
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repe");
    pIemCpu->fPrefixes |= IEM_OP_PRF_REPZ;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
16671
16672
16673/** Opcode 0xf4. */
16674FNIEMOP_DEF(iemOp_hlt)
16675{
16676 IEMOP_HLP_NO_LOCK_PREFIX();
16677 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_hlt);
16678}
16679
16680
16681/** Opcode 0xf5. */
16682FNIEMOP_DEF(iemOp_cmc)
16683{
16684 IEMOP_MNEMONIC("cmc");
16685 IEMOP_HLP_NO_LOCK_PREFIX();
16686 IEM_MC_BEGIN(0, 0);
16687 IEM_MC_FLIP_EFL_BIT(X86_EFL_CF);
16688 IEM_MC_ADVANCE_RIP();
16689 IEM_MC_END();
16690 return VINF_SUCCESS;
16691}
16692
16693
16694/**
16695 * Common implementation of 'inc/dec/not/neg Eb'.
16696 *
16697 * @param bRm The RM byte.
16698 * @param pImpl The instruction implementation.
16699 */
16700FNIEMOP_DEF_2(iemOpCommonUnaryEb, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
16701{
16702 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
16703 {
16704 /* register access */
16705 IEM_MC_BEGIN(2, 0);
16706 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
16707 IEM_MC_ARG(uint32_t *, pEFlags, 1);
16708 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
16709 IEM_MC_REF_EFLAGS(pEFlags);
16710 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
16711 IEM_MC_ADVANCE_RIP();
16712 IEM_MC_END();
16713 }
16714 else
16715 {
16716 /* memory access. */
16717 IEM_MC_BEGIN(2, 2);
16718 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
16719 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
16720 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
16721
16722 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
16723 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
16724 IEM_MC_FETCH_EFLAGS(EFlags);
16725 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
16726 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
16727 else
16728 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU8, pu8Dst, pEFlags);
16729
16730 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
16731 IEM_MC_COMMIT_EFLAGS(EFlags);
16732 IEM_MC_ADVANCE_RIP();
16733 IEM_MC_END();
16734 }
16735 return VINF_SUCCESS;
16736}
16737
16738
16739/**
16740 * Common implementation of 'inc/dec/not/neg Ev'.
16741 *
16742 * @param bRm The RM byte.
16743 * @param pImpl The instruction implementation.
16744 */
16745FNIEMOP_DEF_2(iemOpCommonUnaryEv, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
16746{
16747 /* Registers are handled by a common worker. */
16748 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
16749 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, pImpl, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
16750
16751 /* Memory we do here. */
16752 switch (pIemCpu->enmEffOpSize)
16753 {
16754 case IEMMODE_16BIT:
16755 IEM_MC_BEGIN(2, 2);
16756 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
16757 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
16758 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
16759
16760 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
16761 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
16762 IEM_MC_FETCH_EFLAGS(EFlags);
16763 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
16764 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
16765 else
16766 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU16, pu16Dst, pEFlags);
16767
16768 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
16769 IEM_MC_COMMIT_EFLAGS(EFlags);
16770 IEM_MC_ADVANCE_RIP();
16771 IEM_MC_END();
16772 return VINF_SUCCESS;
16773
16774 case IEMMODE_32BIT:
16775 IEM_MC_BEGIN(2, 2);
16776 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
16777 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
16778 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
16779
16780 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
16781 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
16782 IEM_MC_FETCH_EFLAGS(EFlags);
16783 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
16784 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
16785 else
16786 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU32, pu32Dst, pEFlags);
16787
16788 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
16789 IEM_MC_COMMIT_EFLAGS(EFlags);
16790 IEM_MC_ADVANCE_RIP();
16791 IEM_MC_END();
16792 return VINF_SUCCESS;
16793
16794 case IEMMODE_64BIT:
16795 IEM_MC_BEGIN(2, 2);
16796 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
16797 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
16798 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
16799
16800 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
16801 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
16802 IEM_MC_FETCH_EFLAGS(EFlags);
16803 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
16804 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
16805 else
16806 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU64, pu64Dst, pEFlags);
16807
16808 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
16809 IEM_MC_COMMIT_EFLAGS(EFlags);
16810 IEM_MC_ADVANCE_RIP();
16811 IEM_MC_END();
16812 return VINF_SUCCESS;
16813
16814 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16815 }
16816}
16817
16818
/** Opcode 0xf6 /0.
 * TEST Eb,Ib: AND the byte operand with an immediate, setting flags only
 * (the destination is never written, hence the read-only memory mapping). */
FNIEMOP_DEF_1(iemOp_grp3_test_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC("test Eb,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
        IEM_MC_ARG_CONST(uint8_t,   u8Src,/*=*/u8Imm,   1);
        IEM_MC_ARG(uint32_t *,      pEFlags,            2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
        IEM_MC_ARG(uint8_t,         u8Src,              1);
        IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,    2);
        IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

        /* cbImm=1: the immediate still follows the ModR/M bytes. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_ASSIGN(u8Src, u8Imm);
        /* TEST never writes the operand, so map it read-only. */
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_R, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_R);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
16866
16867
16868/** Opcode 0xf7 /0. */
/**
 * Opcode 0xf7 /0 - TEST Ev,Iz.
 *
 * Word/dword/qword TEST with an immediate; flags only, result discarded
 * (memory operand mapped read-only).  For 64-bit operand size the immediate
 * is a sign-extended 32-bit value (IEM_OPCODE_GET_NEXT_S32_SX_U64), per the
 * usual Iz encoding.  AF is declared undefined.
 *
 * @param   bRm     The ModR/M byte (already fetched by the group decoder).
 */
FNIEMOP_DEF_1(iemOp_grp3_test_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC("test Ev,Iv");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/u16Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/u32Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
                /* No clearing the high dword here - test doesn't write back the result. */
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/u64Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* '2' = number of immediate bytes that follow (see 0xf6 /0). */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_R, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_R, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* Still 4 immediate bytes: Iz is imm32 sign-extended to 64 bits. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_R, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
17004
17005
17006/** Opcode 0xf6 /4, /5, /6 and /7. */
/**
 * Opcode 0xf6 /4, /5, /6 and /7 - common worker for MUL/IMUL/DIV/IDIV Eb.
 *
 * The 8-bit multiply/divide operand comes from r/m8; AX is the implicit
 * destination (referenced via X86_GREG_xAX).  The assembly worker returns
 * 0 on success and non-zero on failure, in which case \#DE is raised
 * (IEM_MC_RAISE_DIVIDE_ERROR) instead of advancing RIP.
 *
 * @param   bRm     The ModR/M byte (already fetched).
 * @param   pfnU8   The 8-bit mul/imul/div/idiv assembly worker.
 */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEb, uint8_t, bRm, PFNIEMAIMPLMULDIVU8, pfnU8)
{
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        IEMOP_HLP_NO_LOCK_PREFIX(); /* (redundant; already checked above) */
        IEM_MC_BEGIN(3, 1);
        IEM_MC_ARG(uint16_t *, pu16AX, 0);
        IEM_MC_ARG(uint8_t, u8Value, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(int32_t, rc);

        IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *, pu16AX, 0);
        IEM_MC_ARG(uint8_t, u8Value, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(int32_t, rc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_MEM_U8(u8Value, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
17060
17061
17062/** Opcode 0xf7 /4, /5, /6 and /7. */
/**
 * Opcode 0xf7 /4, /5, /6 and /7 - common worker for MUL/IMUL/DIV/IDIV Ev.
 *
 * Word/dword/qword mul/div with the implicit xAX/xDX register pair as
 * destination.  The assembly worker returns 0 on success; non-zero raises
 * \#DE.  In the 32-bit cases the upper halves of RAX/RDX are explicitly
 * cleared on success (IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF), matching the
 * architectural zero-extension of 32-bit GPR writes; on \#DE nothing is
 * committed.
 *
 * @param   bRm     The ModR/M byte (already fetched).
 * @param   pImpl   Table of 16/32/64-bit assembly workers for the operation.
 */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEv, uint8_t, bRm, PCIEMOPMULDIVSIZES, pImpl)
{
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint16_t *, pu16AX, 0);
                IEM_MC_ARG(uint16_t *, pu16DX, 1);
                IEM_MC_ARG(uint16_t, u16Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint32_t *, pu32AX, 0);
                IEM_MC_ARG(uint32_t *, pu32DX, 1);
                IEM_MC_ARG(uint32_t, u32Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    /* 32-bit writes zero-extend to 64 bits; only on success. */
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint64_t *, pu64AX, 0);
                IEM_MC_ARG(uint64_t *, pu64DX, 1);
                IEM_MC_ARG(uint64_t, u64Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint16_t *, pu16AX, 0);
                IEM_MC_ARG(uint16_t *, pu16DX, 1);
                IEM_MC_ARG(uint16_t, u16Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint32_t *, pu32AX, 0);
                IEM_MC_ARG(uint32_t *, pu32DX, 1);
                IEM_MC_ARG(uint32_t, u32Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint64_t *, pu64AX, 0);
                IEM_MC_ARG(uint64_t *, pu64DX, 1);
                IEM_MC_ARG(uint64_t, u64Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
17245
17246/** Opcode 0xf6. */
/**
 * Opcode 0xf6 - Group 3, byte operands.
 *
 * Dispatches on the ModR/M reg field: /0 test, /1 invalid (\#UD), /2 not,
 * /3 neg, /4 mul, /5 imul, /6 div, /7 idiv.  Note the EFLAGS-undefined
 * declarations here duplicate the one in iemOpCommonGrp3MulDivEb's callees;
 * they are made before dispatching so the verifier sees them per-mnemonic.
 */
FNIEMOP_DEF(iemOp_Grp3_Eb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
        case 1:
/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2:
            IEMOP_MNEMONIC("not Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_not);
        case 3:
            IEMOP_MNEMONIC("neg Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_neg);
        case 4:
            IEMOP_MNEMONIC("mul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_mul_u8);
        case 5:
            IEMOP_MNEMONIC("imul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_imul_u8);
        case 6:
            IEMOP_MNEMONIC("div Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_div_u8);
        case 7:
            IEMOP_MNEMONIC("idiv Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_idiv_u8);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
17282
17283
17284/** Opcode 0xf7. */
/**
 * Opcode 0xf7 - Group 3, word/dword/qword operands.
 *
 * Same layout as iemOp_Grp3_Eb (0xf6): /0 test, /1 invalid, /2 not,
 * /3 neg, /4 mul, /5 imul, /6 div, /7 idiv, but the mul/div workers take
 * the size-dispatched g_iemAImpl_* tables rather than a single u8 worker.
 */
FNIEMOP_DEF(iemOp_Grp3_Ev)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
        case 1:
/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2:
            IEMOP_MNEMONIC("not Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_not);
        case 3:
            IEMOP_MNEMONIC("neg Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_neg);
        case 4:
            IEMOP_MNEMONIC("mul Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_mul);
        case 5:
            IEMOP_MNEMONIC("imul Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_imul);
        case 6:
            IEMOP_MNEMONIC("div Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_div);
        case 7:
            IEMOP_MNEMONIC("idiv Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_idiv);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
17320
17321
17322/** Opcode 0xf8. */
/** Opcode 0xf8 - CLC: clear the carry flag. */
FNIEMOP_DEF(iemOp_clc)
{
    IEMOP_MNEMONIC("clc");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
17333
17334
17335/** Opcode 0xf9. */
/** Opcode 0xf9 - STC: set the carry flag. */
FNIEMOP_DEF(iemOp_stc)
{
    IEMOP_MNEMONIC("stc");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_SET_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
17346
17347
17348/** Opcode 0xfa. */
/** Opcode 0xfa - CLI.  Deferred to a C implementation since clearing IF
 *  involves privilege/IOPL checks that don't fit the MC template. */
FNIEMOP_DEF(iemOp_cli)
{
    IEMOP_MNEMONIC("cli");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cli);
}
17355
17356
/** Opcode 0xfb - STI.  Deferred to a C implementation like CLI above. */
FNIEMOP_DEF(iemOp_sti)
{
    IEMOP_MNEMONIC("sti");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sti);
}
17363
17364
17365/** Opcode 0xfc. */
/** Opcode 0xfc - CLD: clear the direction flag. */
FNIEMOP_DEF(iemOp_cld)
{
    IEMOP_MNEMONIC("cld");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
17376
17377
17378/** Opcode 0xfd. */
/** Opcode 0xfd - STD: set the direction flag. */
FNIEMOP_DEF(iemOp_std)
{
    IEMOP_MNEMONIC("std");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_SET_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
17389
17390
17391/** Opcode 0xfe. */
17392FNIEMOP_DEF(iemOp_Grp4)
17393{
17394 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
17395 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
17396 {
17397 case 0:
17398 IEMOP_MNEMONIC("inc Ev");
17399 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_inc);
17400 case 1:
17401 IEMOP_MNEMONIC("dec Ev");
17402 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_dec);
17403 default:
17404 IEMOP_MNEMONIC("grp4-ud");
17405 return IEMOP_RAISE_INVALID_OPCODE();
17406 }
17407}
17408
17409
17410/**
17411 * Opcode 0xff /2.
17412 * @param bRm The RM byte.
17413 */
/**
 * Opcode 0xff /2 - CALL Ev (near, indirect).
 *
 * Fetches the new IP/EIP/RIP either from a register or from memory and
 * defers the actual push+branch to iemCImpl_call_16/32/64.  In 64-bit mode
 * the default operand size is 64-bit (IEMOP_HLP_DEFAULT_64BIT_OP_SIZE).
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_calln_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC("calln Ev");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo Too early? */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* The new RIP is taken from a register. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from a memory location. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEM_MC_FETCH_MEM_U16(u16Target, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEM_MC_FETCH_MEM_U32(u32Target, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEM_MC_FETCH_MEM_U64(u64Target, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
17491
/** Far-branch C-implementation worker signature (selector, offset, opsize),
 *  matched by iemCImpl_callf and iemCImpl_FarJmp. */
typedef IEM_CIMPL_DECL_TYPE_3(FNIEMCIMPLFARBRANCH, uint16_t, uSel, uint64_t, offSeg, IEMMODE, enmOpSize);

/**
 * Common worker for opcode 0xff /3 (callf Ep) and /5 (jmpf Ep).
 *
 * Loads a far pointer (offset first, then the 16-bit selector at the higher
 * address) from memory and hands both to pfnCImpl.  Register operands are
 * invalid for these encodings and raise \#UD.
 *
 * @param   bRm         The ModR/M byte.
 * @param   pfnCImpl    iemCImpl_callf or iemCImpl_FarJmp.
 */
FNIEMOP_DEF_2(iemOpHlp_Grp5_far_Ep, uint8_t, bRm, FNIEMCIMPLFARBRANCH *, pfnCImpl)
{
    /* Registers? How?? */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE(); /* callf eax is not legal */

    /* Far pointer loaded from memory. */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(3, 1);
            IEM_MC_ARG(uint16_t, u16Sel, 0);
            IEM_MC_ARG(uint16_t, offSeg, 1);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16(offSeg, pIemCpu->iEffSeg, GCPtrEffSrc);
            /* Selector sits right after the 16-bit offset. */
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc, 2);
            IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            /** @todo testcase: AMD does not seem to believe in the case (see bs-cpu-xcpt-1)
             *        and will apparently ignore REX.W, at least for the jmp far qword [rsp]
             *        and call far qword [rsp] encodings. */
            if (!IEM_IS_GUEST_CPU_AMD(pIemCpu))
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t, u16Sel, 0);
                IEM_MC_ARG(uint64_t, offSeg, 1);
                /* NOTE(review): enmEffOpSize is passed as IEMMODE_16BIT here
                   even though a 64-bit offset is fetched -- verify that the
                   far-branch C workers expect this and not IEMMODE_64BIT. */
                IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(offSeg, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc, 8);
                IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
                IEM_MC_END();
                return VINF_SUCCESS;
            }
            /* AMD falls thru. */

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(3, 1);
            IEM_MC_ARG(uint16_t, u16Sel, 0);
            IEM_MC_ARG(uint32_t, offSeg, 1);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_32BIT, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32(offSeg, pIemCpu->iEffSeg, GCPtrEffSrc);
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc, 4);
            IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
17555
17556
17557/**
17558 * Opcode 0xff /3.
17559 * @param bRm The RM byte.
17560 */
/**
 * Opcode 0xff /3 - CALLF Ep (far call, indirect through memory).
 * Thin wrapper around the common far-pointer loader with iemCImpl_callf.
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_callf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC("callf Ep");
    return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_callf);
}
17566
17567
17568/**
17569 * Opcode 0xff /4.
17570 * @param bRm The RM byte.
17571 */
/**
 * Opcode 0xff /4 - JMP Ev (near, indirect).
 *
 * Like calln (/2) but sets RIP directly via IEM_MC_SET_RIP_U* instead of
 * deferring to a call helper -- nothing is pushed.  Default 64-bit operand
 * size in long mode.
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpn_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC("jmpn Ev");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo Too early? */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* The new RIP is taken from a register. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_SET_RIP_U16(u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_SET_RIP_U32(u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_SET_RIP_U64(u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from a memory location. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEM_MC_FETCH_MEM_U16(u16Target, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U16(u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEM_MC_FETCH_MEM_U32(u32Target, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U32(u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEM_MC_FETCH_MEM_U64(u64Target, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U64(u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
17649
17650
17651/**
17652 * Opcode 0xff /5.
17653 * @param bRm The RM byte.
17654 */
/**
 * Opcode 0xff /5 - JMPF Ep (far jump, indirect through memory).
 * Thin wrapper around the common far-pointer loader with iemCImpl_FarJmp.
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC("jmpf Ep");
    return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_FarJmp);
}
17660
17661
17662/**
17663 * Opcode 0xff /6.
17664 * @param bRm The RM byte.
17665 */
/**
 * Opcode 0xff /6 - PUSH Ev.
 *
 * Register operands share the generic push-GPR worker; memory operands are
 * fetched here and pushed via IEM_MC_PUSH_U16/32/64.  Default 64-bit
 * operand size in long mode.
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_push_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC("push Ev");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo Too early? */

    /* Registers are handled by a common worker. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(iemOpCommonPushGReg, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);

    /* Memory we do here. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint16_t, u16Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEM_MC_FETCH_MEM_U16(u16Src, pIemCpu->iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U16(u16Src);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEM_MC_FETCH_MEM_U32(u32Src, pIemCpu->iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U32(u32Src);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEM_MC_FETCH_MEM_U64(u64Src, pIemCpu->iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U64(u64Src);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
17715
17716
17717/** Opcode 0xff. */
/**
 * Opcode 0xff - Group 5 dispatcher.
 *
 * /0 inc Ev, /1 dec Ev, /2 calln Ev, /3 callf Ep, /4 jmpn Ev, /5 jmpf Ep,
 * /6 push Ev, /7 invalid (\#UD).
 */
FNIEMOP_DEF(iemOp_Grp5)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            IEMOP_MNEMONIC("inc Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_inc);
        case 1:
            IEMOP_MNEMONIC("dec Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_dec);
        case 2:
            return FNIEMOP_CALL_1(iemOp_Grp5_calln_Ev, bRm);
        case 3:
            return FNIEMOP_CALL_1(iemOp_Grp5_callf_Ep, bRm);
        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp5_jmpn_Ev, bRm);
        case 5:
            return FNIEMOP_CALL_1(iemOp_Grp5_jmpf_Ep, bRm);
        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp5_push_Ev, bRm);
        case 7:
            IEMOP_MNEMONIC("grp5-ud");
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    /* Unreachable: the 3-bit reg field is fully covered above. */
    AssertFailedReturn(VERR_IEM_IPE_3);
}
17745
17746
17747
17748const PFNIEMOP g_apfnOneByteMap[256] =
17749{
17750 /* 0x00 */ iemOp_add_Eb_Gb, iemOp_add_Ev_Gv, iemOp_add_Gb_Eb, iemOp_add_Gv_Ev,
17751 /* 0x04 */ iemOp_add_Al_Ib, iemOp_add_eAX_Iz, iemOp_push_ES, iemOp_pop_ES,
17752 /* 0x08 */ iemOp_or_Eb_Gb, iemOp_or_Ev_Gv, iemOp_or_Gb_Eb, iemOp_or_Gv_Ev,
17753 /* 0x0c */ iemOp_or_Al_Ib, iemOp_or_eAX_Iz, iemOp_push_CS, iemOp_2byteEscape,
17754 /* 0x10 */ iemOp_adc_Eb_Gb, iemOp_adc_Ev_Gv, iemOp_adc_Gb_Eb, iemOp_adc_Gv_Ev,
17755 /* 0x14 */ iemOp_adc_Al_Ib, iemOp_adc_eAX_Iz, iemOp_push_SS, iemOp_pop_SS,
17756 /* 0x18 */ iemOp_sbb_Eb_Gb, iemOp_sbb_Ev_Gv, iemOp_sbb_Gb_Eb, iemOp_sbb_Gv_Ev,
17757 /* 0x1c */ iemOp_sbb_Al_Ib, iemOp_sbb_eAX_Iz, iemOp_push_DS, iemOp_pop_DS,
17758 /* 0x20 */ iemOp_and_Eb_Gb, iemOp_and_Ev_Gv, iemOp_and_Gb_Eb, iemOp_and_Gv_Ev,
17759 /* 0x24 */ iemOp_and_Al_Ib, iemOp_and_eAX_Iz, iemOp_seg_ES, iemOp_daa,
17760 /* 0x28 */ iemOp_sub_Eb_Gb, iemOp_sub_Ev_Gv, iemOp_sub_Gb_Eb, iemOp_sub_Gv_Ev,
17761 /* 0x2c */ iemOp_sub_Al_Ib, iemOp_sub_eAX_Iz, iemOp_seg_CS, iemOp_das,
17762 /* 0x30 */ iemOp_xor_Eb_Gb, iemOp_xor_Ev_Gv, iemOp_xor_Gb_Eb, iemOp_xor_Gv_Ev,
17763 /* 0x34 */ iemOp_xor_Al_Ib, iemOp_xor_eAX_Iz, iemOp_seg_SS, iemOp_aaa,
17764 /* 0x38 */ iemOp_cmp_Eb_Gb, iemOp_cmp_Ev_Gv, iemOp_cmp_Gb_Eb, iemOp_cmp_Gv_Ev,
17765 /* 0x3c */ iemOp_cmp_Al_Ib, iemOp_cmp_eAX_Iz, iemOp_seg_DS, iemOp_aas,
17766 /* 0x40 */ iemOp_inc_eAX, iemOp_inc_eCX, iemOp_inc_eDX, iemOp_inc_eBX,
17767 /* 0x44 */ iemOp_inc_eSP, iemOp_inc_eBP, iemOp_inc_eSI, iemOp_inc_eDI,
17768 /* 0x48 */ iemOp_dec_eAX, iemOp_dec_eCX, iemOp_dec_eDX, iemOp_dec_eBX,
17769 /* 0x4c */ iemOp_dec_eSP, iemOp_dec_eBP, iemOp_dec_eSI, iemOp_dec_eDI,
17770 /* 0x50 */ iemOp_push_eAX, iemOp_push_eCX, iemOp_push_eDX, iemOp_push_eBX,
17771 /* 0x54 */ iemOp_push_eSP, iemOp_push_eBP, iemOp_push_eSI, iemOp_push_eDI,
17772 /* 0x58 */ iemOp_pop_eAX, iemOp_pop_eCX, iemOp_pop_eDX, iemOp_pop_eBX,
17773 /* 0x5c */ iemOp_pop_eSP, iemOp_pop_eBP, iemOp_pop_eSI, iemOp_pop_eDI,
17774 /* 0x60 */ iemOp_pusha, iemOp_popa, iemOp_bound_Gv_Ma_evex, iemOp_arpl_Ew_Gw_movsx_Gv_Ev,
17775 /* 0x64 */ iemOp_seg_FS, iemOp_seg_GS, iemOp_op_size, iemOp_addr_size,
17776 /* 0x68 */ iemOp_push_Iz, iemOp_imul_Gv_Ev_Iz, iemOp_push_Ib, iemOp_imul_Gv_Ev_Ib,
17777 /* 0x6c */ iemOp_insb_Yb_DX, iemOp_inswd_Yv_DX, iemOp_outsb_Yb_DX, iemOp_outswd_Yv_DX,
17778 /* 0x70 */ iemOp_jo_Jb, iemOp_jno_Jb, iemOp_jc_Jb, iemOp_jnc_Jb,
17779 /* 0x74 */ iemOp_je_Jb, iemOp_jne_Jb, iemOp_jbe_Jb, iemOp_jnbe_Jb,
17780 /* 0x78 */ iemOp_js_Jb, iemOp_jns_Jb, iemOp_jp_Jb, iemOp_jnp_Jb,
17781 /* 0x7c */ iemOp_jl_Jb, iemOp_jnl_Jb, iemOp_jle_Jb, iemOp_jnle_Jb,
17782 /* 0x80 */ iemOp_Grp1_Eb_Ib_80, iemOp_Grp1_Ev_Iz, iemOp_Grp1_Eb_Ib_82, iemOp_Grp1_Ev_Ib,
17783 /* 0x84 */ iemOp_test_Eb_Gb, iemOp_test_Ev_Gv, iemOp_xchg_Eb_Gb, iemOp_xchg_Ev_Gv,
17784 /* 0x88 */ iemOp_mov_Eb_Gb, iemOp_mov_Ev_Gv, iemOp_mov_Gb_Eb, iemOp_mov_Gv_Ev,
17785 /* 0x8c */ iemOp_mov_Ev_Sw, iemOp_lea_Gv_M, iemOp_mov_Sw_Ev, iemOp_Grp1A,
17786 /* 0x90 */ iemOp_nop, iemOp_xchg_eCX_eAX, iemOp_xchg_eDX_eAX, iemOp_xchg_eBX_eAX,
17787 /* 0x94 */ iemOp_xchg_eSP_eAX, iemOp_xchg_eBP_eAX, iemOp_xchg_eSI_eAX, iemOp_xchg_eDI_eAX,
17788 /* 0x98 */ iemOp_cbw, iemOp_cwd, iemOp_call_Ap, iemOp_wait,
17789 /* 0x9c */ iemOp_pushf_Fv, iemOp_popf_Fv, iemOp_sahf, iemOp_lahf,
17790 /* 0xa0 */ iemOp_mov_Al_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL, iemOp_mov_Ov_rAX,
17791 /* 0xa4 */ iemOp_movsb_Xb_Yb, iemOp_movswd_Xv_Yv, iemOp_cmpsb_Xb_Yb, iemOp_cmpswd_Xv_Yv,
17792 /* 0xa8 */ iemOp_test_AL_Ib, iemOp_test_eAX_Iz, iemOp_stosb_Yb_AL, iemOp_stoswd_Yv_eAX,
17793 /* 0xac */ iemOp_lodsb_AL_Xb, iemOp_lodswd_eAX_Xv, iemOp_scasb_AL_Xb, iemOp_scaswd_eAX_Xv,
17794 /* 0xb0 */ iemOp_mov_AL_Ib, iemOp_CL_Ib, iemOp_DL_Ib, iemOp_BL_Ib,
17795 /* 0xb4 */ iemOp_mov_AH_Ib, iemOp_CH_Ib, iemOp_DH_Ib, iemOp_BH_Ib,
17796 /* 0xb8 */ iemOp_eAX_Iv, iemOp_eCX_Iv, iemOp_eDX_Iv, iemOp_eBX_Iv,
17797 /* 0xbc */ iemOp_eSP_Iv, iemOp_eBP_Iv, iemOp_eSI_Iv, iemOp_eDI_Iv,
17798 /* 0xc0 */ iemOp_Grp2_Eb_Ib, iemOp_Grp2_Ev_Ib, iemOp_retn_Iw, iemOp_retn,
17799 /* 0xc4 */ iemOp_les_Gv_Mp_vex2, iemOp_lds_Gv_Mp_vex3, iemOp_Grp11_Eb_Ib, iemOp_Grp11_Ev_Iz,
17800 /* 0xc8 */ iemOp_enter_Iw_Ib, iemOp_leave, iemOp_retf_Iw, iemOp_retf,
17801 /* 0xcc */ iemOp_int_3, iemOp_int_Ib, iemOp_into, iemOp_iret,
17802 /* 0xd0 */ iemOp_Grp2_Eb_1, iemOp_Grp2_Ev_1, iemOp_Grp2_Eb_CL, iemOp_Grp2_Ev_CL,
17803 /* 0xd4 */ iemOp_aam_Ib, iemOp_aad_Ib, iemOp_salc, iemOp_xlat,
17804 /* 0xd8 */ iemOp_EscF0, iemOp_EscF1, iemOp_EscF2, iemOp_EscF3,
17805 /* 0xdc */ iemOp_EscF4, iemOp_EscF5, iemOp_EscF6, iemOp_EscF7,
17806 /* 0xe0 */ iemOp_loopne_Jb, iemOp_loope_Jb, iemOp_loop_Jb, iemOp_jecxz_Jb,
17807 /* 0xe4 */ iemOp_in_AL_Ib, iemOp_in_eAX_Ib, iemOp_out_Ib_AL, iemOp_out_Ib_eAX,
17808 /* 0xe8 */ iemOp_call_Jv, iemOp_jmp_Jv, iemOp_jmp_Ap, iemOp_jmp_Jb,
17809 /* 0xec */ iemOp_in_AL_DX, iemOp_eAX_DX, iemOp_out_DX_AL, iemOp_out_DX_eAX,
17810 /* 0xf0 */ iemOp_lock, iemOp_int_1, iemOp_repne, iemOp_repe,
17811 /* 0xf4 */ iemOp_hlt, iemOp_cmc, iemOp_Grp3_Eb, iemOp_Grp3_Ev,
17812 /* 0xf8 */ iemOp_clc, iemOp_stc, iemOp_cli, iemOp_sti,
17813 /* 0xfc */ iemOp_cld, iemOp_std, iemOp_Grp4, iemOp_Grp5,
17814};
17815
17816
17817/** @} */
17818