VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructions.cpp.h@ 61059

Last change on this file since 61059 was 61059, checked in by vboxsync, 9 years ago

IEM: a couple of new SSE instrs - currently disabled.

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 603.2 KB
Line 
1/* $Id: IEMAllInstructions.cpp.h 61059 2016-05-19 19:14:25Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 */
5
6/*
7 * Copyright (C) 2011-2015 Oracle Corporation
8 *
9 * This file is part of VirtualBox Open Source Edition (OSE), as
10 * available from http://www.215389.xyz. This file is free software;
11 * you can redistribute it and/or modify it under the terms of the GNU
12 * General Public License (GPL) as published by the Free Software
13 * Foundation, in version 2 as it comes in the "COPYING" file of the
14 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
15 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
16 */
17
18
19/*******************************************************************************
20* Global Variables *
21*******************************************************************************/
22extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */
23
24
/**
 * Common worker for instructions like ADD, AND, OR, ++ with a byte
 * memory/register as the destination.
 *
 * Decodes the ModR/M byte: the reg field selects the source register, the
 * r/m field (register or memory) is the destination operand.
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rm_r8, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* LOCK is only valid with a memory destination. */
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, u8Src, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         * Note! We're putting the eflags on the stack here so we can commit them
         *       after the memory.
         */
        /* Operations without a locked variant (pfnLockedU8 == NULL) don't write
           the destination, so map it read-only. */
        uint32_t const fAccess = pImpl->pfnLockedU8 ? IEM_ACCESS_DATA_RW : IEM_ACCESS_DATA_R; /* CMP,TEST */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, u8Src, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_FETCH_EFLAGS(EFlags);
        /* Pick the locked worker when a LOCK prefix is present. */
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
85
86
/**
 * Common worker for word/dword/qword instructions like ADD, AND, OR, ++ with
 * memory/register as the destination.
 *
 * Dispatches on the effective operand size (16/32/64-bit); reg field is the
 * source, r/m field (register or memory) is the destination.
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rm_rv, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* LOCK is only valid with a memory destination. */
        IEMOP_HLP_NO_LOCK_PREFIX();

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                /* TEST doesn't write its destination, so skip the 64-bit
                   high-half clearing done for 32-bit register writes. */
                if (pImpl != &g_iemAImpl_test)
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're accessing memory.
         * Note! We're putting the eflags on the stack here so we can commit them
         *       after the memory.
         */
        /* pfnLockedU8 is used as a proxy here: an implementation either has all
           locked variants or none (CMP/TEST), independent of operand size. */
        uint32_t const fAccess = pImpl->pfnLockedU8 ? IEM_ACCESS_DATA_RW : IEM_ACCESS_DATA_R /* CMP,TEST */;
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
233
234
/**
 * Common worker for byte instructions like ADD, AND, OR, ++ with a register as
 * the destination.
 *
 * The reg field is the destination register; the r/m field (register or
 * memory) is the source. Since the destination is always a register, the
 * memory operand only needs a plain read (no map/commit dance).
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_r8_rm, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, u8Src, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_FETCH_GREG_U8(u8Src, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_GREG_U8(pu8Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
        IEM_MC_BEGIN(3, 1);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, u8Src, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_MEM_U8(u8Src, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U8(pu8Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
286
287
/**
 * Common worker for word/dword/qword instructions like ADD, AND, OR, ++ with a
 * register as the destination.
 *
 * The reg field is the destination register; the r/m field (register or
 * memory) is the source, so memory only needs a plain read.
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rv_rm, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U16(u16Src, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U32(u32Src, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U32(pu32Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                /* 32-bit register writes zero the high dword in 64-bit mode. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U64(u64Src, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U16(u16Src, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U32(u32Src, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U32(pu32Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                /* 32-bit register writes zero the high dword in 64-bit mode. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U64(u64Src, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
415
416
/**
 * Common worker for instructions like ADD, AND, OR, ++ with working on AL with
 * a byte immediate.
 *
 * The destination is fixed to AL; the source is the next opcode byte.
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_AL_Ib, PCIEMOPBINSIZES, pImpl)
{
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG(uint8_t *, pu8Dst, 0);
    IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/ u8Imm, 1);
    IEM_MC_ARG(uint32_t *, pEFlags, 2);

    IEM_MC_REF_GREG_U8(pu8Dst, X86_GREG_xAX);
    IEM_MC_REF_EFLAGS(pEFlags);
    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
441
442
/**
 * Common worker for instructions like ADD, AND, OR, ++ with working on
 * AX/EAX/RAX with a word/dword immediate.
 *
 * For the 64-bit operand size the immediate is a dword that gets sign
 * extended to 64 bits, matching the Iz encoding.
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rAX_Iz, PCIEMOPBINSIZES, pImpl)
{
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint16_t *, pu16Dst, 0);
            IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm, 1);
            IEM_MC_ARG(uint32_t *, pEFlags, 2);

            IEM_MC_REF_GREG_U16(pu16Dst, X86_GREG_xAX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
            IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm, 1);
            IEM_MC_ARG(uint32_t *, pEFlags, 2);

            IEM_MC_REF_GREG_U32(pu32Dst, X86_GREG_xAX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

            /* TEST doesn't write its destination, so skip the 64-bit
               high-half clearing done for 32-bit register writes. */
            if (pImpl != &g_iemAImpl_test)
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            /* Iz: 32-bit immediate sign-extended to 64 bits. */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint64_t *, pu64Dst, 0);
            IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm, 1);
            IEM_MC_ARG(uint32_t *, pEFlags, 2);

            IEM_MC_REF_GREG_U64(pu64Dst, X86_GREG_xAX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
515
516
/** Opcodes 0xf1, 0xd6 - invalid instruction handler, raises \#UD. */
FNIEMOP_DEF(iemOp_Invalid)
{
    IEMOP_MNEMONIC("Invalid");
    return IEMOP_RAISE_INVALID_OPCODE();
}
523
524
525
526/** @name ..... opcodes.
527 *
528 * @{
529 */
530
531/** @} */
532
533
534/** @name Two byte opcodes (first byte 0x0f).
535 *
536 * @{
537 */
538
/** Opcode 0x0f 0x00 /0 - SLDT: store the LDT selector to a register (any
 *  operand size) or to a 16-bit memory operand. */
FNIEMOP_DEF_1(iemOp_Grp6_sldt, uint8_t, bRm)
{
    IEMOP_MNEMONIC("sldt Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        /* Register destination: the store width follows the operand size. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Ldtr);
                IEM_MC_FETCH_LDTR_U16(u16Ldtr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Ldtr);
                IEM_MC_FETCH_LDTR_U32(u32Ldtr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Ldtr);
                IEM_MC_FETCH_LDTR_U64(u64Ldtr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory destination: always a 16-bit store regardless of operand size. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Ldtr);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_LDTR_U16(u16Ldtr);
        IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Ldtr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
595
596
/** Opcode 0x0f 0x00 /1 - STR: store the task register selector to a register
 *  (any operand size) or to a 16-bit memory operand. */
FNIEMOP_DEF_1(iemOp_Grp6_str, uint8_t, bRm)
{
    IEMOP_MNEMONIC("str Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        /* Register destination: the store width follows the operand size. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Tr);
                IEM_MC_FETCH_TR_U16(u16Tr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Tr);
                IEM_MC_FETCH_TR_U32(u32Tr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tr);
                IEM_MC_FETCH_TR_U64(u64Tr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory destination: always a 16-bit store regardless of operand size. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Tr);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_TR_U16(u16Tr);
        IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Tr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
653
654
/** Opcode 0x0f 0x00 /2 - LLDT: load the LDT register from a 16-bit selector
 *  operand; the heavy lifting is deferred to iemCImpl_lldt. */
FNIEMOP_DEF_1(iemOp_Grp6_lldt, uint8_t, bRm)
{
    IEMOP_MNEMONIC("lldt Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
685
686
/** Opcode 0x0f 0x00 /3 - LTR: load the task register from a 16-bit selector
 *  operand; the heavy lifting is deferred to iemCImpl_ltr. */
FNIEMOP_DEF_1(iemOp_Grp6_ltr, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ltr Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
717
718
719/** Opcode 0x0f 0x00 /3. */
720FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
721{
722 IEMOP_HLP_MIN_286();
723 IEMOP_HLP_NO_REAL_OR_V86_MODE();
724
725 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
726 {
727 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
728 IEM_MC_BEGIN(2, 0);
729 IEM_MC_ARG(uint16_t, u16Sel, 0);
730 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
731 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
732 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
733 IEM_MC_END();
734 }
735 else
736 {
737 IEM_MC_BEGIN(2, 1);
738 IEM_MC_ARG(uint16_t, u16Sel, 0);
739 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
740 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
741 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
742 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
743 IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc);
744 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
745 IEM_MC_END();
746 }
747 return VINF_SUCCESS;
748}
749
750
/** Opcode 0x0f 0x00 /4 - VERR: verify a segment for reading (fWrite=false). */
FNIEMOP_DEF_1(iemOp_Grp6_verr, uint8_t, bRm)
{
    IEMOP_MNEMONIC("verr Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
}
758
759
760/** Opcode 0x0f 0x00 /5. */
761FNIEMOP_DEF_1(iemOp_Grp6_verw, uint8_t, bRm)
762{
763 IEMOP_MNEMONIC("verr Ew");
764 IEMOP_HLP_MIN_286();
765 return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
766}
767
768
/** Opcode 0x0f 0x00 - Group 6: dispatch on the ModR/M reg field (bits 5:3). */
FNIEMOP_DEF(iemOp_Grp6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: return FNIEMOP_CALL_1(iemOp_Grp6_sldt, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_Grp6_str, bRm);
        case 2: return FNIEMOP_CALL_1(iemOp_Grp6_lldt, bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_Grp6_ltr, bRm);
        case 4: return FNIEMOP_CALL_1(iemOp_Grp6_verr, bRm);
        case 5: return FNIEMOP_CALL_1(iemOp_Grp6_verw, bRm);
        /* /6 and /7 are undefined in group 6. */
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        case 7: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

}
787
788
/** Opcode 0x0f 0x01 /0 - SGDT: store the GDTR to memory (work done in
 *  iemCImpl_sgdt). */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC("sgdt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
805
806
/** Opcode 0x0f 0x01 /0, mod=3 (0xc1) - VMCALL. Not implemented; raises \#UD. */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
813
814
/** Opcode 0x0f 0x01 /0, mod=3 (0xc2) - VMLAUNCH. Not implemented; raises \#UD. */
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
821
822
/** Opcode 0x0f 0x01 /0, mod=3 (0xc3) - VMRESUME. Not implemented; raises \#UD. */
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
829
830
/** Opcode 0x0f 0x01 /0, mod=3 (0xc4) - VMXOFF. Not implemented; raises \#UD. */
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
837
838
/** Opcode 0x0f 0x01 /1 - SIDT: store the IDTR to memory (work done in
 *  iemCImpl_sidt). */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC("sidt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
855
856
/** Opcode 0x0f 0x01 /1, mod=3 (0xc8) - MONITOR. Deferred to iemCImpl_monitor. */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    IEMOP_MNEMONIC("monitor");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pIemCpu->iEffSeg);
}
864
865
/** Opcode 0x0f 0x01 /1, mod=3 (0xc9) - MWAIT. Deferred to iemCImpl_mwait. */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    IEMOP_MNEMONIC("mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
}
873
874
/** Opcode 0x0f 0x01 /2 - LGDT: load the GDTR from memory (work done in
 *  iemCImpl_lgdt; the operand size decides the base width loaded). */
FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC("lgdt");
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pIemCpu->enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}
891
892
/** Opcode 0x0f 0x01 0xd0 - XGETBV. \#UD unless the guest CPU has XSAVE. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    IEMOP_MNEMONIC("xgetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xgetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}
904
905
/** Opcode 0x0f 0x01 0xd1 - XSETBV. \#UD unless the guest CPU has XSAVE. */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    IEMOP_MNEMONIC("xsetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xsetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}
917
918
/** Opcode 0x0f 0x01 /3 - LIDT: load the IDTR from memory (work done in
 *  iemCImpl_lidt). */
FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
{
    /* In 64-bit mode the operand size is forced to 64-bit. */
    IEMMODE enmEffOpSize = pIemCpu->enmCpuMode == IEMMODE_64BIT
                         ? IEMMODE_64BIT
                         : pIemCpu->enmEffOpSize;
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}
936
937
/* AMD SVM instructions (0x0f 0x01 0xd8..0xdf) - all declared as #UD stubs. */

/** Opcode 0x0f 0x01 0xd8. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmmcall);

/** Opcode 0x0f 0x01 0xda. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);

/** Opcode 0x0f 0x01 0xdb. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);

/** Opcode 0x0f 0x01 0xdc. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);

/** Opcode 0x0f 0x01 0xdd. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);

/** Opcode 0x0f 0x01 0xde. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);

/** Opcode 0x0f 0x01 0xdf. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
961
962/** Opcode 0x0f 0x01 /4. */
963FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
964{
965 IEMOP_MNEMONIC("smsw");
966 IEMOP_HLP_MIN_286();
967 IEMOP_HLP_NO_LOCK_PREFIX();
968 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
969 {
970 switch (pIemCpu->enmEffOpSize)
971 {
972 case IEMMODE_16BIT:
973 IEM_MC_BEGIN(0, 1);
974 IEM_MC_LOCAL(uint16_t, u16Tmp);
975 IEM_MC_FETCH_CR0_U16(u16Tmp);
976 if (IEM_GET_TARGET_CPU(pIemCpu) > IEMTARGETCPU_386)
977 { /* likely */ }
978 else if (IEM_GET_TARGET_CPU(pIemCpu) >= IEMTARGETCPU_386)
979 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
980 else
981 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
982 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Tmp);
983 IEM_MC_ADVANCE_RIP();
984 IEM_MC_END();
985 return VINF_SUCCESS;
986
987 case IEMMODE_32BIT:
988 IEM_MC_BEGIN(0, 1);
989 IEM_MC_LOCAL(uint32_t, u32Tmp);
990 IEM_MC_FETCH_CR0_U32(u32Tmp);
991 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Tmp);
992 IEM_MC_ADVANCE_RIP();
993 IEM_MC_END();
994 return VINF_SUCCESS;
995
996 case IEMMODE_64BIT:
997 IEM_MC_BEGIN(0, 1);
998 IEM_MC_LOCAL(uint64_t, u64Tmp);
999 IEM_MC_FETCH_CR0_U64(u64Tmp);
1000 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Tmp);
1001 IEM_MC_ADVANCE_RIP();
1002 IEM_MC_END();
1003 return VINF_SUCCESS;
1004
1005 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1006 }
1007 }
1008 else
1009 {
1010 /* Ignore operand size here, memory refs are always 16-bit. */
1011 IEM_MC_BEGIN(0, 2);
1012 IEM_MC_LOCAL(uint16_t, u16Tmp);
1013 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
1014 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
1015 IEM_MC_FETCH_CR0_U16(u16Tmp);
1016 if (IEM_GET_TARGET_CPU(pIemCpu) > IEMTARGETCPU_386)
1017 { /* likely */ }
1018 else if (pIemCpu->uTargetCpu >= IEMTARGETCPU_386)
1019 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
1020 else
1021 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
1022 IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Tmp);
1023 IEM_MC_ADVANCE_RIP();
1024 IEM_MC_END();
1025 return VINF_SUCCESS;
1026 }
1027}
1028
1029
/** Opcode 0x0f 0x01 /6. LMSW — load the machine status word into CR0. */
FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
{
    /* The operand size is effectively ignored, all is 16-bit and only the
       lower 3-bits are used. */
    IEMOP_MNEMONIC("lmsw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_LOCK_PREFIX();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register source; the actual CR0 update is done by iemCImpl_lmsw. */
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    else
    {
        /* Memory source: always a 16-bit read. */
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1058
1059
/** Opcode 0x0f 0x01 /7. INVLPG — invalidate a TLB entry.
 *  Only reached with a memory operand; the Grp7 dispatcher routes the
 *  register forms (mod=3) to swapgs/rdtscp instead. */
FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
{
    IEMOP_MNEMONIC("invlpg");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(1, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    /* Only the effective address is needed; no memory access is performed. */
    IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}
1073
1074
/** Opcode 0x0f 0x01 /7, mod=3, rm=0. SWAPGS — 64-bit mode only. */
FNIEMOP_DEF(iemOp_Grp7_swapgs)
{
    IEMOP_MNEMONIC("swapgs");
    IEMOP_HLP_ONLY_64BIT();
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
}
1083
1084
/** Opcode 0x0f 0x01 /7, mod=3, rm=1. RDTSCP — not implemented yet. */
FNIEMOP_DEF(iemOp_Grp7_rdtscp)
{
    NOREF(pIemCpu);
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}
1092
1093
/** Opcode 0x0f 0x01.
 *
 * Group 7 dispatcher.  The reg field of the ModRM byte selects the
 * instruction; for several encodings a mod=3 (register) form selects a
 * different instruction than the memory form, keyed off the rm field.
 */
FNIEMOP_DEF(iemOp_Grp7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            /* mem: SGDT; reg: VMX instructions (rm 1..4), rest #UD. */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_sgdt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 1:
            /* mem: SIDT; reg: MONITOR/MWAIT, rest #UD. */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_sidt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 2:
            /* mem: LGDT; reg: XGETBV/XSETBV, rest #UD. */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_lgdt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 3:
            /* mem: LIDT; reg: AMD SVM instructions (all 8 rm values). */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_lidt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
                case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
                case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
                case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* all 8 rm values return above */
            }

        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);

        case 5:
            return IEMOP_RAISE_INVALID_OPCODE();

        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);

        case 7:
            /* mem: INVLPG; reg: SWAPGS/RDTSCP, rest #UD. */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_invlpg, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
1170
/** Opcode 0x0f 0x00 /3.
 *
 * Common worker for LAR and LSL (Gv,Ew): fetches a 16-bit selector from a
 * register or memory and defers to iemCImpl_LarLsl_u16/u64.
 *
 * @param fIsLar    true for LAR, false for LSL.
 *
 * Notes grounded in the code below:
 *  - Invalid in real and V86 mode (IEMOP_HLP_NO_REAL_OR_V86_MODE).
 *  - 32-bit and 64-bit operand sizes share the 64-bit destination path;
 *    presumably iemCImpl_LarLsl_u64 handles the 32-bit truncation — confirm.
 */
FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
{
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register source. */
        IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 3);

                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_CIMPL_4(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, pEFlags, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 3);

                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_CIMPL_4(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, pEFlags, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory source: the selector word is read from memory.  Note the
           decode-complete marker comes after effective-address calculation. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);

                IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_CIMPL_4(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, pEFlags, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
/** @todo testcase: make sure it's a 16-bit read. */

                IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_CIMPL_4(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, pEFlags, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
1272
1273
1274
/** Opcode 0x0f 0x02. LAR Gv,Ew — via the common LAR/LSL worker. */
FNIEMOP_DEF(iemOp_lar_Gv_Ew)
{
    IEMOP_MNEMONIC("lar Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
}
1281
1282
/** Opcode 0x0f 0x03. LSL Gv,Ew — via the common LAR/LSL worker. */
FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
{
    IEMOP_MNEMONIC("lsl Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
}
1289
1290
/** Opcode 0x0f 0x05. SYSCALL — fully handled by iemCImpl_syscall. */
FNIEMOP_DEF(iemOp_syscall)
{
    IEMOP_MNEMONIC("syscall"); /** @todo 286 LOADALL */
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
}
1298
1299
/** Opcode 0x0f 0x06. CLTS — clear the task-switched flag; deferred to CIMPL. */
FNIEMOP_DEF(iemOp_clts)
{
    IEMOP_MNEMONIC("clts");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
}
1307
1308
/** Opcode 0x0f 0x07. SYSRET — fully handled by iemCImpl_sysret. */
FNIEMOP_DEF(iemOp_sysret)
{
    IEMOP_MNEMONIC("sysret"); /** @todo 386 LOADALL */
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
}
1316
1317
/** Opcode 0x0f 0x08. INVD — not implemented yet. */
FNIEMOP_STUB(iemOp_invd);
// IEMOP_HLP_MIN_486();
1321
1322
/** Opcode 0x0f 0x09. WBINVD — only the CPL-0 check is emulated; the actual
 *  cache write-back/invalidate is treated as a no-op. */
FNIEMOP_DEF(iemOp_wbinvd)
{
    IEMOP_MNEMONIC("wbinvd");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS; /* ignore for now */
}
1335
1336
/** Opcode 0x0f 0x0b. UD2 — architecturally guaranteed invalid opcode. */
FNIEMOP_DEF(iemOp_ud2)
{
    IEMOP_MNEMONIC("ud2");
    return IEMOP_RAISE_INVALID_OPCODE();
}
1343
/** Opcode 0x0f 0x0d. AMD prefetch group P (PREFETCH/PREFETCHW). */
FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
{
    /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
    if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->f3DNowPrefetch)
    {
        IEMOP_MNEMONIC("GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register forms are invalid; only memory operands may be prefetched. */
        IEMOP_MNEMONIC("GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    IEMOP_HLP_NO_LOCK_PREFIX();
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 2: /* Aliased to /0 for the time being. */
        case 4: /* Aliased to /0 for the time being. */
        case 5: /* Aliased to /0 for the time being. */
        case 6: /* Aliased to /0 for the time being. */
        case 7: /* Aliased to /0 for the time being. */
        case 0: IEMOP_MNEMONIC("prefetch"); break;
        case 1: IEMOP_MNEMONIC("prefetchw"); break;
        case 3: IEMOP_MNEMONIC("prefetchw"); break; /* NOTE(review): /1 and /3 both labelled prefetchw — confirm against AMD's Grp P table. */
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

    /* Decode the effective address for fault semantics, then do nothing. */
    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    /* Currently a NOP. */
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
1383
1384
/** Opcode 0x0f 0x0e. FEMMS (3DNow!) — not implemented yet. */
FNIEMOP_STUB(iemOp_femms);
1387
1388
/* 3DNow! worker stubs, dispatched from iemOp_3Dnow below.  The trailing
   byte in each comment is the 3DNow! suffix opcode.  None are implemented. */

/** Opcode 0x0f 0x0f 0x0c. */
FNIEMOP_STUB(iemOp_3Dnow_pi2fw_Pq_Qq);

/** Opcode 0x0f 0x0f 0x0d. */
FNIEMOP_STUB(iemOp_3Dnow_pi2fd_Pq_Qq);

/** Opcode 0x0f 0x0f 0x1c. */
FNIEMOP_STUB(iemOp_3Dnow_pf2fw_Pq_Qq);

/** Opcode 0x0f 0x0f 0x1d. */
FNIEMOP_STUB(iemOp_3Dnow_pf2fd_Pq_Qq);

/** Opcode 0x0f 0x0f 0x8a. */
FNIEMOP_STUB(iemOp_3Dnow_pfnacc_Pq_Qq);

/** Opcode 0x0f 0x0f 0x8e. */
FNIEMOP_STUB(iemOp_3Dnow_pfpnacc_Pq_Qq);

/** Opcode 0x0f 0x0f 0x90. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpge_Pq_Qq);

/** Opcode 0x0f 0x0f 0x94. */
FNIEMOP_STUB(iemOp_3Dnow_pfmin_Pq_Qq);

/** Opcode 0x0f 0x0f 0x96. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcp_Pq_Qq);

/** Opcode 0x0f 0x0f 0x97. */
FNIEMOP_STUB(iemOp_3Dnow_pfrsqrt_Pq_Qq);

/** Opcode 0x0f 0x0f 0x9a. */
FNIEMOP_STUB(iemOp_3Dnow_pfsub_Pq_Qq);

/** Opcode 0x0f 0x0f 0x9e. */
FNIEMOP_STUB(iemOp_3Dnow_pfadd_PQ_Qq);

/** Opcode 0x0f 0x0f 0xa0. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpgt_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa4. */
FNIEMOP_STUB(iemOp_3Dnow_pfmax_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa6. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcpit1_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa7. */
FNIEMOP_STUB(iemOp_3Dnow_pfrsqit1_Pq_Qq);

/** Opcode 0x0f 0x0f 0xaa. */
FNIEMOP_STUB(iemOp_3Dnow_pfsubr_Pq_Qq);

/** Opcode 0x0f 0x0f 0xae. */
FNIEMOP_STUB(iemOp_3Dnow_pfacc_PQ_Qq);

/** Opcode 0x0f 0x0f 0xb0. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpeq_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb4. */
FNIEMOP_STUB(iemOp_3Dnow_pfmul_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb6. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcpit2_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb7. */
FNIEMOP_STUB(iemOp_3Dnow_pmulhrw_Pq_Qq);

/** Opcode 0x0f 0x0f 0xbb. */
FNIEMOP_STUB(iemOp_3Dnow_pswapd_Pq_Qq);

/** Opcode 0x0f 0x0f 0xbf. */
FNIEMOP_STUB(iemOp_3Dnow_pavgusb_PQ_Qq);
1460
1461
/** Opcode 0x0f 0x0f.
 *
 * 3DNow! dispatcher.  Raises #UD when the guest CPU profile lacks 3DNow!.
 *
 * NOTE(review): the 3DNow! suffix opcode architecturally follows the ModRM
 * byte and displacement, yet the next opcode byte is consumed here before
 * any ModRM decoding; all workers are stubs today, so verify the fetch
 * order when implementing them.
 */
FNIEMOP_DEF(iemOp_3Dnow)
{
    if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->f3DNow)
    {
        IEMOP_MNEMONIC("3Dnow");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    /* This is pretty sparse, use switch instead of table. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    switch (b)
    {
        case 0x0c: return FNIEMOP_CALL(iemOp_3Dnow_pi2fw_Pq_Qq);
        case 0x0d: return FNIEMOP_CALL(iemOp_3Dnow_pi2fd_Pq_Qq);
        case 0x1c: return FNIEMOP_CALL(iemOp_3Dnow_pf2fw_Pq_Qq);
        case 0x1d: return FNIEMOP_CALL(iemOp_3Dnow_pf2fd_Pq_Qq);
        case 0x8a: return FNIEMOP_CALL(iemOp_3Dnow_pfnacc_Pq_Qq);
        case 0x8e: return FNIEMOP_CALL(iemOp_3Dnow_pfpnacc_Pq_Qq);
        case 0x90: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpge_Pq_Qq);
        case 0x94: return FNIEMOP_CALL(iemOp_3Dnow_pfmin_Pq_Qq);
        case 0x96: return FNIEMOP_CALL(iemOp_3Dnow_pfrcp_Pq_Qq);
        case 0x97: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqrt_Pq_Qq);
        case 0x9a: return FNIEMOP_CALL(iemOp_3Dnow_pfsub_Pq_Qq);
        case 0x9e: return FNIEMOP_CALL(iemOp_3Dnow_pfadd_PQ_Qq);
        case 0xa0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpgt_Pq_Qq);
        case 0xa4: return FNIEMOP_CALL(iemOp_3Dnow_pfmax_Pq_Qq);
        case 0xa6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit1_Pq_Qq);
        case 0xa7: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqit1_Pq_Qq);
        case 0xaa: return FNIEMOP_CALL(iemOp_3Dnow_pfsubr_Pq_Qq);
        case 0xae: return FNIEMOP_CALL(iemOp_3Dnow_pfacc_PQ_Qq);
        case 0xb0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpeq_Pq_Qq);
        case 0xb4: return FNIEMOP_CALL(iemOp_3Dnow_pfmul_Pq_Qq);
        case 0xb6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit2_Pq_Qq);
        case 0xb7: return FNIEMOP_CALL(iemOp_3Dnow_pmulhrw_Pq_Qq);
        case 0xbb: return FNIEMOP_CALL(iemOp_3Dnow_pswapd_Pq_Qq);
        case 0xbf: return FNIEMOP_CALL(iemOp_3Dnow_pavgusb_PQ_Qq);
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
1503
1504
/* SSE/SSE2 move stubs for opcodes 0x0f 0x10..0x17 (prefix-dependent forms
   listed in each name).  None are implemented; //NEXT marks the queue. */

/** Opcode 0x0f 0x10. */
FNIEMOP_STUB(iemOp_movups_Vps_Wps__movupd_Vpd_Wpd__movss_Vss_Wss__movsd_Vsd_Wsd);
/** Opcode 0x0f 0x11. */
FNIEMOP_STUB(iemOp_movups_Wps_Vps__movupd_Wpd_Vpd__movss_Wss_Vss__movsd_Vsd_Wsd);
/** Opcode 0x0f 0x12. */
FNIEMOP_STUB(iemOp_movlps_Vq_Mq__movhlps_Vq_Uq__movlpd_Vq_Mq__movsldup_Vq_Wq__movddup_Vq_Wq); //NEXT
/** Opcode 0x0f 0x13. */
FNIEMOP_STUB(iemOp_movlps_Mq_Vq__movlpd_Mq_Vq); //NEXT
/** Opcode 0x0f 0x14. */
FNIEMOP_STUB(iemOp_unpckhlps_Vps_Wq__unpcklpd_Vpd_Wq);
/** Opcode 0x0f 0x15. */
FNIEMOP_STUB(iemOp_unpckhps_Vps_Wq__unpckhpd_Vpd_Wq);
/** Opcode 0x0f 0x16. */
FNIEMOP_STUB(iemOp_movhps_Vq_Mq__movlhps_Vq_Uq__movhpd_Vq_Mq__movshdup_Vq_Wq); //NEXT
/** Opcode 0x0f 0x17. */
FNIEMOP_STUB(iemOp_movhps_Mq_Vq__movhpd_Mq_Vq); //NEXT
1521
1522
/** Opcode 0x0f 0x18. Group 16 — PREFETCHNTA/T0/T1/T2 hints.
 *  Memory forms decode the address (for fault semantics) and act as NOPs;
 *  register forms raise #UD. */
FNIEMOP_DEF(iemOp_prefetch_Grp16)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX();
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 4: /* Aliased to /0 for the time being according to AMD. */
            case 5: /* Aliased to /0 for the time being according to AMD. */
            case 6: /* Aliased to /0 for the time being according to AMD. */
            case 7: /* Aliased to /0 for the time being according to AMD. */
            case 0: IEMOP_MNEMONIC("prefetchNTA m8"); break;
            case 1: IEMOP_MNEMONIC("prefetchT0  m8"); break;
            case 2: IEMOP_MNEMONIC("prefetchT1  m8"); break;
            case 3: IEMOP_MNEMONIC("prefetchT2  m8"); break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }

        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        /* Currently a NOP. */
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    return IEMOP_RAISE_INVALID_OPCODE();
}
1554
1555
/** Opcode 0x0f 0x19..0x1f. Multi-byte NOP Ev — decodes the operand (so
 *  memory forms get proper address/fault decoding) but does nothing. */
FNIEMOP_DEF(iemOp_nop_Ev)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEM_MC_BEGIN(0, 0);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        /* Currently a NOP. */
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1578
1579
/** Opcode 0x0f 0x20. MOV Rd,Cd — read a control register into a GPR. */
FNIEMOP_DEF(iemOp_mov_Rd_Cd)
{
    /* mod is ignored, as is operand size overrides. */
    IEMOP_MNEMONIC("mov Rd,Cd");
    IEMOP_HLP_MIN_386();
    /* Operand size is forced: 64-bit in long mode, 32-bit otherwise. */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_64BIT;
    else
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg;
    if (pIemCpu->fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fMovCr8In32Bit)
            return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    /* Only CR0, CR2-CR4 and CR8 exist; anything else is #UD. */
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_HLP_DONE_DECODING();

    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, (X86_MODRM_RM_MASK & bRm) | pIemCpu->uRexB, iCrReg);
}
1611
1612
/** Opcode 0x0f 0x21. MOV Rd,Dd — read a debug register into a GPR.
 *  REX.R is invalid here (there are no DR8+).
 *  NOTE(review): this uses IEMOP_HLP_NO_LOCK_PREFIX while the 0x23 twin uses
 *  IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX — confirm the asymmetry is intended. */
FNIEMOP_DEF(iemOp_mov_Rd_Dd)
{
    IEMOP_MNEMONIC("mov Rd,Dd");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REX_R)
        return IEMOP_RAISE_INVALID_OPCODE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
                                   (X86_MODRM_RM_MASK & bRm) | pIemCpu->uRexB,
                                   ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK));
}
1626
1627
/** Opcode 0x0f 0x22. MOV Cd,Rd — write a GPR into a control register. */
FNIEMOP_DEF(iemOp_mov_Cd_Rd)
{
    /* mod is ignored, as is operand size overrides. */
    IEMOP_MNEMONIC("mov Cd,Rd");
    IEMOP_HLP_MIN_386();
    /* Operand size is forced: 64-bit in long mode, 32-bit otherwise. */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_64BIT;
    else
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg;
    if (pIemCpu->fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fMovCr8In32Bit)
            return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    /* Only CR0, CR2-CR4 and CR8 exist; anything else is #UD. */
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_HLP_DONE_DECODING();

    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, (X86_MODRM_RM_MASK & bRm) | pIemCpu->uRexB);
}
1659
1660
/** Opcode 0x0f 0x23. MOV Dd,Rd — write a GPR into a debug register.
 *  REX.R is invalid here (there are no DR8+). */
FNIEMOP_DEF(iemOp_mov_Dd_Rd)
{
    IEMOP_MNEMONIC("mov Dd,Rd");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REX_R)
        return IEMOP_RAISE_INVALID_OPCODE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
                                   ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK),
                                   (X86_MODRM_RM_MASK & bRm) | pIemCpu->uRexB);
}
1674
1675
/** Opcode 0x0f 0x24. MOV Rd,Td (test registers) — emulated as #UD. */
FNIEMOP_DEF(iemOp_mov_Rd_Td)
{
    IEMOP_MNEMONIC("mov Rd,Td");
    /** @todo works on 386 and 486. */
    /* The RM byte is not considered, see testcase. */
    return IEMOP_RAISE_INVALID_OPCODE();
}
1684
1685
/** Opcode 0x0f 0x26. MOV Td,Rd (test registers) — emulated as #UD. */
FNIEMOP_DEF(iemOp_mov_Td_Rd)
{
    IEMOP_MNEMONIC("mov Td,Rd");
    /** @todo works on 386 and 486. */
    /* The RM byte is not considered, see testcase. */
    return IEMOP_RAISE_INVALID_OPCODE();
}
1694
1695
/** Opcode 0x0f 0x28. MOVAPS/MOVAPD Vx,Wx — aligned 128-bit load into an
 *  XMM register; the 0x66 prefix selects MOVAPD (SSE2 checks). */
FNIEMOP_DEF(iemOp_movaps_Vps_Wps__movapd_Vpd_Wpd)
{
    IEMOP_MNEMONIC(!(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP) ? "movaps r,mr" : "movapd r,mr");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        IEM_MC_BEGIN(0, 0);
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP))
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        else
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg,
                              (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.  The memory access is alignment-checked (#GP).
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint128_t,                 uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ */
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP))
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        else
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1741
1742
/** Opcode 0x0f 0x29. MOVAPS/MOVAPD Wx,Vx — aligned 128-bit store from an
 *  XMM register; the 0x66 prefix selects MOVAPD (SSE2 checks). */
FNIEMOP_DEF(iemOp_movaps_Wps_Vps__movapd_Wpd_Vpd)
{
    IEMOP_MNEMONIC(!(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP) ? "movaps mr,r" : "movapd mr,r");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        IEM_MC_BEGIN(0, 0);
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP))
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        else
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.  The memory access is alignment-checked (#GP).
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint128_t,                 uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ */
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP))
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        else
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pIemCpu->iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1788
1789
/** Opcode 0x0f 0x2a. CVTPI2PS/CVTPI2PD/CVTSI2SS/CVTSI2SD — not implemented yet. */
FNIEMOP_STUB(iemOp_cvtpi2ps_Vps_Qpi__cvtpi2pd_Vpd_Qpi__cvtsi2ss_Vss_Ey__cvtsi2sd_Vsd_Ey); //NEXT
1792
1793
1794/** Opcode 0x0f 0x2b. */
1795#ifndef VBOX_WITH_REM /** @todo figure out why this causes moderate regressions when enabled... Enabled for non-REM to hopefully make some headway there... */
1796FNIEMOP_DEF(iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd)
1797{
1798 IEMOP_MNEMONIC(!(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP) ? "movntps mr,r" : "movntpd mr,r");
1799 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1800 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1801 {
1802 /*
1803 * Register, memory.
1804 */
1805 IEM_MC_BEGIN(0, 2);
1806 IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
1807 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1808
1809 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1810 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ */
1811 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP))
1812 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1813 else
1814 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1815
1816 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);
1817 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uSrc);
1818
1819 IEM_MC_ADVANCE_RIP();
1820 IEM_MC_END();
1821 }
1822 /* The register, register encoding is invalid. */
1823 else
1824 return IEMOP_RAISE_INVALID_OPCODE();
1825 return VINF_SUCCESS;
1826}
1827#else
1828FNIEMOP_STUB(iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd);
1829#endif
1830
1831
/* Stubs for opcodes 0x0f 0x2c..0x2f (conversions and compares) — not implemented. */

/** Opcode 0x0f 0x2c. */
FNIEMOP_STUB(iemOp_cvttps2pi_Ppi_Wps__cvttpd2pi_Ppi_Wpd__cvttss2si_Gy_Wss__cvttsd2si_Yu_Wsd); //NEXT
/** Opcode 0x0f 0x2d. */
FNIEMOP_STUB(iemOp_cvtps2pi_Ppi_Wps__cvtpd2pi_QpiWpd__cvtss2si_Gy_Wss__cvtsd2si_Gy_Wsd);
/** Opcode 0x0f 0x2e. */
FNIEMOP_STUB(iemOp_ucomiss_Vss_Wss__ucomisd_Vsd_Wsd); //NEXT
/** Opcode 0x0f 0x2f. */
FNIEMOP_STUB(iemOp_comiss_Vss_Wss__comisd_Vsd_Wsd);
1840
1841
/** Opcode 0x0f 0x30. WRMSR — fully handled by iemCImpl_wrmsr. */
FNIEMOP_DEF(iemOp_wrmsr)
{
    IEMOP_MNEMONIC("wrmsr");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
}
1849
1850
/** Opcode 0x0f 0x31. RDTSC — fully handled by iemCImpl_rdtsc. */
FNIEMOP_DEF(iemOp_rdtsc)
{
    IEMOP_MNEMONIC("rdtsc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
}
1858
1859
/** Opcode 0x0f 0x32. RDMSR — fully handled by iemCImpl_rdmsr.
 *  (Comment previously said 0x33; RDMSR is 0F 32, RDPMC is 0F 33.) */
FNIEMOP_DEF(iemOp_rdmsr)
{
    IEMOP_MNEMONIC("rdmsr");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
}
1867
1868
/** Opcode 0x0f 0x33. (RDPMC is 0F 33; comment previously said 0x34.) */
FNIEMOP_STUB(iemOp_rdpmc);
/** Opcode 0x0f 0x34. */
FNIEMOP_STUB(iemOp_sysenter);
/** Opcode 0x0f 0x35. */
FNIEMOP_STUB(iemOp_sysexit);
/** Opcode 0x0f 0x37. */
FNIEMOP_STUB(iemOp_getsec);
/** Opcode 0x0f 0x38. */
FNIEMOP_UD_STUB(iemOp_3byte_Esc_A4); /* Here there be dragons... */
/** Opcode 0x0f 0x3a. */
FNIEMOP_UD_STUB(iemOp_3byte_Esc_A5); /* Here there be dragons... */
1881
1882
/**
 * Implements a conditional move.
 *
 * Wish there was an obvious way to do this where we could share and reduce
 * code bloat.
 *
 * Expands to the full decoder body for one CMOVcc Gv,Ev instruction:
 * register and memory source forms for 16/32/64-bit operand sizes.  In the
 * 32-bit cases the ELSE branch clears the high half of the destination's
 * 64-bit register even when the condition is false (matching the
 * architectural 32-bit-write behaviour); the 16/64-bit cases only store
 * when the condition holds.  The memory forms always perform the memory
 * read regardless of the condition.
 *
 * @param a_Cnd The conditional "microcode" operation.
 */
#define CMOV_X(a_Cnd) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) \
    { \
        switch (pIemCpu->enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(0, 1); \
                IEM_MC_LOCAL(uint16_t, u16Tmp); \
                a_Cnd { \
                    IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB); \
                    IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(0, 1); \
                IEM_MC_LOCAL(uint32_t, u32Tmp); \
                a_Cnd { \
                    IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB); \
                    IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp); \
                } IEM_MC_ELSE() { \
                    IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(0, 1); \
                IEM_MC_LOCAL(uint64_t, u64Tmp); \
                a_Cnd { \
                    IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB); \
                    IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        switch (pIemCpu->enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(0, 2); \
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffSrc); \
                IEM_MC_LOCAL(uint16_t, u16Tmp); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
                IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrEffSrc); \
                a_Cnd { \
                    IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(0, 2); \
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffSrc); \
                IEM_MC_LOCAL(uint32_t, u32Tmp); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
                IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffSrc); \
                a_Cnd { \
                    IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp); \
                } IEM_MC_ELSE() { \
                    IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(0, 2); \
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffSrc); \
                IEM_MC_LOCAL(uint64_t, u64Tmp); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
                IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffSrc); \
                a_Cnd { \
                    IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } do {} while (0)
1983
1984
1985
/** Opcode 0x0f 0x40. CMOVO Gv,Ev — move if overflow (OF=1). */
FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovo Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
}
1992
1993
/** Opcode 0x0f 0x41. CMOVNO Gv,Ev — move if no overflow (OF=0). */
FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovno Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
}
2000
2001
/** Opcode 0x0f 0x42. CMOVC/CMOVB Gv,Ev — move if carry (CF=1). */
FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovc Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
}
2008
2009
/** Opcode 0x0f 0x43. CMOVNC/CMOVAE Gv,Ev — move if no carry (CF=0). */
FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovnc Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
}
2016
2017
2018/** Opcode 0x0f 0x44 - CMOVE/CMOVZ Gv,Ev: conditional move if equal/zero (ZF=1). */
2019FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
2020{
2021 IEMOP_MNEMONIC("cmove Gv,Ev");
2022 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
2023}
2024
2025
2026/** Opcode 0x0f 0x45 - CMOVNE/CMOVNZ Gv,Ev: conditional move if not equal/not zero (ZF=0). */
2027FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
2028{
2029 IEMOP_MNEMONIC("cmovne Gv,Ev");
2030 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
2031}
2032
2033
2034/** Opcode 0x0f 0x46 - CMOVBE/CMOVNA Gv,Ev: conditional move if below or equal (CF=1 or ZF=1). */
2035FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
2036{
2037 IEMOP_MNEMONIC("cmovbe Gv,Ev");
2038 CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
2039}
2040
2041
2042/** Opcode 0x0f 0x47 - CMOVNBE/CMOVA Gv,Ev: conditional move if above (CF=0 and ZF=0). */
2043FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
2044{
2045 IEMOP_MNEMONIC("cmovnbe Gv,Ev");
2046 CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
2047}
2048
2049
2050/** Opcode 0x0f 0x48 - CMOVS Gv,Ev: conditional move if sign (SF=1). */
2051FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
2052{
2053 IEMOP_MNEMONIC("cmovs Gv,Ev");
2054 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
2055}
2056
2057
2058/** Opcode 0x0f 0x49 - CMOVNS Gv,Ev: conditional move if no sign (SF=0). */
2059FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
2060{
2061 IEMOP_MNEMONIC("cmovns Gv,Ev");
2062 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
2063}
2064
2065
2066/** Opcode 0x0f 0x4a - CMOVP/CMOVPE Gv,Ev: conditional move if parity even (PF=1). */
2067FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
2068{
2069 IEMOP_MNEMONIC("cmovp Gv,Ev");
2070 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
2071}
2072
2073
2074/** Opcode 0x0f 0x4b - CMOVNP/CMOVPO Gv,Ev: conditional move if parity odd (PF=0). */
2075FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
2076{
2077 IEMOP_MNEMONIC("cmovnp Gv,Ev");
2078 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
2079}
2080
2081
2082/** Opcode 0x0f 0x4c - CMOVL/CMOVNGE Gv,Ev: conditional move if less (SF != OF). */
2083FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
2084{
2085 IEMOP_MNEMONIC("cmovl Gv,Ev");
2086 CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
2087}
2088
2089
2090/** Opcode 0x0f 0x4d - CMOVNL/CMOVGE Gv,Ev: conditional move if greater or equal (SF == OF). */
2091FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
2092{
2093 IEMOP_MNEMONIC("cmovnl Gv,Ev");
2094 CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
2095}
2096
2097
2098/** Opcode 0x0f 0x4e - CMOVLE/CMOVNG Gv,Ev: conditional move if less or equal (ZF=1 or SF != OF). */
2099FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
2100{
2101 IEMOP_MNEMONIC("cmovle Gv,Ev");
2102 CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
2103}
2104
2105
2106/** Opcode 0x0f 0x4f - CMOVNLE/CMOVG Gv,Ev: conditional move if greater (ZF=0 and SF == OF). */
2107FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
2108{
2109 IEMOP_MNEMONIC("cmovnle Gv,Ev");
2110 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
2111}
2112
2113#undef CMOV_X
2114
/*
 * Opcodes 0x0f 0x50 thru 0x5f: SSE/SSE2 floating-point and conversion
 * instructions.  All still declared as FNIEMOP_STUB, i.e. not implemented
 * yet; entries tagged //NEXT are the next candidates for implementation.
 */
2115/** Opcode 0x0f 0x50. */
2116FNIEMOP_STUB(iemOp_movmskps_Gy_Ups__movmskpd_Gy_Upd);
2117/** Opcode 0x0f 0x51. */
2118FNIEMOP_STUB(iemOp_sqrtps_Wps_Vps__sqrtpd_Wpd_Vpd__sqrtss_Vss_Wss__sqrtsd_Vsd_Wsd);
2119/** Opcode 0x0f 0x52. */
2120FNIEMOP_STUB(iemOp_rsqrtps_Wps_Vps__rsqrtss_Vss_Wss);
2121/** Opcode 0x0f 0x53. */
2122FNIEMOP_STUB(iemOp_rcpps_Wps_Vps__rcpss_Vs_Wss);
2123/** Opcode 0x0f 0x54. */
2124FNIEMOP_STUB(iemOp_andps_Vps_Wps__andpd_Wpd_Vpd);
2125/** Opcode 0x0f 0x55. */
2126FNIEMOP_STUB(iemOp_andnps_Vps_Wps__andnpd_Wpd_Vpd);
2127/** Opcode 0x0f 0x56. */
2128FNIEMOP_STUB(iemOp_orps_Wpd_Vpd__orpd_Wpd_Vpd);
2129/** Opcode 0x0f 0x57. */
2130FNIEMOP_STUB(iemOp_xorps_Vps_Wps__xorpd_Wpd_Vpd);
2131/** Opcode 0x0f 0x58. */
2132FNIEMOP_STUB(iemOp_addps_Vps_Wps__addpd_Vpd_Wpd__addss_Vss_Wss__addsd_Vsd_Wsd); //NEXT
2133/** Opcode 0x0f 0x59. */
2134FNIEMOP_STUB(iemOp_mulps_Vps_Wps__mulpd_Vpd_Wpd__mulss_Vss__Wss__mulsd_Vsd_Wsd);//NEXT
2135/** Opcode 0x0f 0x5a. */
2136FNIEMOP_STUB(iemOp_cvtps2pd_Vpd_Wps__cvtpd2ps_Vps_Wpd__cvtss2sd_Vsd_Wss__cvtsd2ss_Vss_Wsd);
2137/** Opcode 0x0f 0x5b. */
2138FNIEMOP_STUB(iemOp_cvtdq2ps_Vps_Wdq__cvtps2dq_Vdq_Wps__cvtps2dq_Vdq_Wps);
2139/** Opcode 0x0f 0x5c. */
2140FNIEMOP_STUB(iemOp_subps_Vps_Wps__subpd_Vps_Wdp__subss_Vss_Wss__subsd_Vsd_Wsd);
2141/** Opcode 0x0f 0x5d. */
2142FNIEMOP_STUB(iemOp_minps_Vps_Wps__minpd_Vpd_Wpd__minss_Vss_Wss__minsd_Vsd_Wsd);
2143/** Opcode 0x0f 0x5e. */
2144FNIEMOP_STUB(iemOp_divps_Vps_Wps__divpd_Vpd_Wpd__divss_Vss_Wss__divsd_Vsd_Wsd);
2145/** Opcode 0x0f 0x5f. */
2146FNIEMOP_STUB(iemOp_maxps_Vps_Wps__maxpd_Vpd_Wpd__maxss_Vss_Wss__maxsd_Vsd_Wsd);
2147
2148
2149/**
2150 * Common worker for SSE2 and MMX instructions on the forms:
2151 * pxxxx xmm1, xmm2/mem128
2152 * pxxxx mm1, mm2/mem32
2153 *
2154 * The 2nd operand is the first half of a register, which in the memory case
2155 * means a 32-bit memory access for MMX, and for SSE a 64-bit access that
2156 * must be 128-bit aligned (see the FETCH_MEM_U64_ALIGN_U128 below).
2157 *
2158 * Exceptions type 4.
2159 */
2160FNIEMOP_DEF_1(iemOpCommonMmxSse_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
2161{
2162 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
 /* The operand-size prefix (0x66) selects the SSE form; no prefix the MMX form. */
2163 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
2164 {
2165 case IEM_OP_PRF_SIZE_OP: /* SSE */
2166 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2167 {
2168 /*
2169 * Register, register.
2170 */
2171 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2172 IEM_MC_BEGIN(2, 0);
2173 IEM_MC_ARG(uint128_t *, pDst, 0);
2174 IEM_MC_ARG(uint64_t const *, pSrc, 1);
2175 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2176 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
2177 IEM_MC_REF_XREG_U64_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
2178 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2179 IEM_MC_ADVANCE_RIP();
2180 IEM_MC_END();
2181 }
2182 else
2183 {
2184 /*
2185 * Register, memory.
2186 */
2187 IEM_MC_BEGIN(2, 2);
2188 IEM_MC_ARG(uint128_t *, pDst, 0);
2189 IEM_MC_LOCAL(uint64_t, uSrc);
2190 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
2191 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2192
2193 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2194 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2195 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
 /* Only the low 64 bits are fetched, but 128-bit alignment is enforced. */
2196 IEM_MC_FETCH_MEM_U64_ALIGN_U128(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);
2197
2198 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
2199 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2200
2201 IEM_MC_ADVANCE_RIP();
2202 IEM_MC_END();
2203 }
2204 return VINF_SUCCESS;
2205
2206 case 0: /* MMX */
 /* Some entries (e.g. punpcklqdq) have no MMX form; raise #UD then. */
2207 if (!pImpl->pfnU64)
2208 return IEMOP_RAISE_INVALID_OPCODE();
2209 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2210 {
2211 /*
2212 * Register, register.
2213 */
2214 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
2215 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
2216 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2217 IEM_MC_BEGIN(2, 0);
2218 IEM_MC_ARG(uint64_t *, pDst, 0);
2219 IEM_MC_ARG(uint32_t const *, pSrc, 1);
2220 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2221 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2222 IEM_MC_REF_MREG_U32_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
2223 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2224 IEM_MC_ADVANCE_RIP();
2225 IEM_MC_END();
2226 }
2227 else
2228 {
2229 /*
2230 * Register, memory.
2231 */
2232 IEM_MC_BEGIN(2, 2);
2233 IEM_MC_ARG(uint64_t *, pDst, 0);
2234 IEM_MC_LOCAL(uint32_t, uSrc);
2235 IEM_MC_ARG_LOCAL_REF(uint32_t const *, pSrc, uSrc, 1);
2236 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2237
2238 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2239 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2240 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2241 IEM_MC_FETCH_MEM_U32(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);
2242
2243 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2244 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2245
2246 IEM_MC_ADVANCE_RIP();
2247 IEM_MC_END();
2248 }
2249 return VINF_SUCCESS;
2250
2251 default:
 /* F3/F2 prefixed forms are undefined for these opcodes. */
2252 return IEMOP_RAISE_INVALID_OPCODE();
2253 }
2254}
2255
2256
2257/** Opcode 0x0f 0x60 - punpcklbw: interleave the low-order bytes of the two operands. */
2258FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd__punpcklbw_Vdq_Wdq)
2259{
2260 IEMOP_MNEMONIC("punpcklbw");
2261 return FNIEMOP_CALL_1(iemOpCommonMmxSse_LowLow_To_Full, &g_iemAImpl_punpcklbw);
2262}
2263
2264
2265/** Opcode 0x0f 0x61 - punpcklwd: interleave the low-order words. */
2266FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd__punpcklwd_Vdq_Wdq)
2267{
2268 IEMOP_MNEMONIC("punpcklwd"); /** @todo AMD mark the MMX version as 3DNow!. Intel says MMX CPUID req. */
2269 return FNIEMOP_CALL_1(iemOpCommonMmxSse_LowLow_To_Full, &g_iemAImpl_punpcklwd);
2270}
2271
2272
2273/** Opcode 0x0f 0x62 - punpckldq: interleave the low-order doublewords. */
2274FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd__punpckldq_Vdq_Wdq)
2275{
2276 IEMOP_MNEMONIC("punpckldq");
2277 return FNIEMOP_CALL_1(iemOpCommonMmxSse_LowLow_To_Full, &g_iemAImpl_punpckldq);
2278}
2279
2280
/* Pack and compare opcodes 0x0f 0x63..0x67 - still stubs (not implemented). */
2281/** Opcode 0x0f 0x63. */
2282FNIEMOP_STUB(iemOp_packsswb_Pq_Qq__packsswb_Vdq_Wdq);
2283/** Opcode 0x0f 0x64. */
2284FNIEMOP_STUB(iemOp_pcmpgtb_Pq_Qq__pcmpgtb_Vdq_Wdq);
2285/** Opcode 0x0f 0x65. */
2286FNIEMOP_STUB(iemOp_pcmpgtw_Pq_Qq__pcmpgtw_Vdq_Wdq);
2287/** Opcode 0x0f 0x66. */
2288FNIEMOP_STUB(iemOp_pcmpgtd_Pq_Qq__pcmpgtd_Vdq_Wdq);
2289/** Opcode 0x0f 0x67. */
2290FNIEMOP_STUB(iemOp_packuswb_Pq_Qq__packuswb_Vdq_Wdq);
2291
2292
2293/**
2294 * Common worker for SSE2 and MMX instructions on the forms:
2295 * pxxxx xmm1, xmm2/mem128
2296 * pxxxx mm1, mm2/mem64
2297 *
2298 * The 2nd operand is the second half of a register, which in the memory case
2299 * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
2300 * where it may read the full 128 bits or only the upper 64 bits.
2301 *
2302 * Exceptions type 4.
2303 */
2304FNIEMOP_DEF_1(iemOpCommonMmxSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
2305{
2306 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
 /* The operand-size prefix (0x66) selects the SSE form; no prefix the MMX form. */
2307 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
2308 {
2309 case IEM_OP_PRF_SIZE_OP: /* SSE */
2310 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2311 {
2312 /*
2313 * Register, register.
2314 */
2315 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2316 IEM_MC_BEGIN(2, 0);
2317 IEM_MC_ARG(uint128_t *, pDst, 0);
2318 IEM_MC_ARG(uint128_t const *, pSrc, 1);
2319 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2320 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
2321 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
2322 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2323 IEM_MC_ADVANCE_RIP();
2324 IEM_MC_END();
2325 }
2326 else
2327 {
2328 /*
2329 * Register, memory.
2330 */
2331 IEM_MC_BEGIN(2, 2);
2332 IEM_MC_ARG(uint128_t *, pDst, 0);
2333 IEM_MC_LOCAL(uint128_t, uSrc);
2334 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
2335 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2336
2337 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2338 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2339 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2340 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc); /* Most CPUs probably only read the high qword. */
2341
2342 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
2343 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2344
2345 IEM_MC_ADVANCE_RIP();
2346 IEM_MC_END();
2347 }
2348 return VINF_SUCCESS;
2349
2350 case 0: /* MMX */
 /* Some entries (e.g. punpckhqdq) have no MMX form; raise #UD then. */
2351 if (!pImpl->pfnU64)
2352 return IEMOP_RAISE_INVALID_OPCODE();
2353 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2354 {
2355 /*
2356 * Register, register.
2357 */
2358 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
2359 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
2360 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2361 IEM_MC_BEGIN(2, 0);
2362 IEM_MC_ARG(uint64_t *, pDst, 0);
2363 IEM_MC_ARG(uint64_t const *, pSrc, 1);
2364 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2365 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2366 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
2367 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2368 IEM_MC_ADVANCE_RIP();
2369 IEM_MC_END();
2370 }
2371 else
2372 {
2373 /*
2374 * Register, memory.
2375 */
2376 IEM_MC_BEGIN(2, 2);
2377 IEM_MC_ARG(uint64_t *, pDst, 0);
2378 IEM_MC_LOCAL(uint64_t, uSrc);
2379 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
2380 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2381
2382 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2383 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2384 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2385 IEM_MC_FETCH_MEM_U64(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);
2386
2387 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2388 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2389
2390 IEM_MC_ADVANCE_RIP();
2391 IEM_MC_END();
2392 }
2393 return VINF_SUCCESS;
2394
2395 default:
 /* F3/F2 prefixed forms are undefined for these opcodes. */
2396 return IEMOP_RAISE_INVALID_OPCODE();
2397 }
2398}
2399
2400
2401/** Opcode 0x0f 0x68 - punpckhbw: interleave the high-order bytes. */
2402FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qq__punpckhbw_Vdq_Wdq)
2403{
2404 IEMOP_MNEMONIC("punpckhbw");
2405 return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
2406}
2407
2408
2409/** Opcode 0x0f 0x69 - punpckhwd: interleave the high-order words. */
2410FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qd__punpckhwd_Vdq_Wdq)
2411{
2412 IEMOP_MNEMONIC("punpckhwd");
2413 return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
2414}
2415
2416
2417/** Opcode 0x0f 0x6a - punpckhdq: interleave the high-order doublewords. */
2418FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qd__punpckhdq_Vdq_Wdq)
2419{
2420 IEMOP_MNEMONIC("punpckhdq");
2421 return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
2422}
2423
2424/** Opcode 0x0f 0x6b - packssdw: still a stub (not implemented). */
2425FNIEMOP_STUB(iemOp_packssdw_Pq_Qd__packssdq_Vdq_Wdq);
2426
2427
2428/** Opcode 0x0f 0x6c - punpcklqdq: interleave the low quadwords (SSE2 only; the worker raises #UD for the MMX form). */
2429FNIEMOP_DEF(iemOp_punpcklqdq_Vdq_Wdq)
2430{
2431 IEMOP_MNEMONIC("punpcklqdq");
2432 return FNIEMOP_CALL_1(iemOpCommonMmxSse_LowLow_To_Full, &g_iemAImpl_punpcklqdq);
2433}
2434
2435
2436/** Opcode 0x0f 0x6d - punpckhqdq: interleave the high quadwords (SSE2 only; the worker raises #UD for the MMX form). */
2437FNIEMOP_DEF(iemOp_punpckhqdq_Vdq_Wdq)
2438{
2439 IEMOP_MNEMONIC("punpckhqdq");
2440 return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhqdq);
2441}
2442
2443
2444/** Opcode 0x0f 0x6e - movd/movq: load an MMX or XMM register from Ed/Eq; REX.W selects the 64-bit (movq) form. */
2445FNIEMOP_DEF(iemOp_movd_q_Pd_Ey__movd_q_Vy_Ey)
2446{
2447 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
 /* 0x66 prefix selects the XMM destination, no prefix the MMX destination. */
2448 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
2449 {
2450 case IEM_OP_PRF_SIZE_OP: /* SSE */
2451 IEMOP_MNEMONIC("movd/q Wd/q,Ed/q");
2452 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2453 {
2454 /* XMM, greg*/
2455 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2456 IEM_MC_BEGIN(0, 1);
2457 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2458 if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2459 {
2460 IEM_MC_LOCAL(uint64_t, u64Tmp);
2461 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
2462 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);
2463 }
2464 else
2465 {
2466 IEM_MC_LOCAL(uint32_t, u32Tmp);
2467 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
2468 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);
2469 }
2470 IEM_MC_ADVANCE_RIP();
2471 IEM_MC_END();
2472 }
2473 else
2474 {
2475 /* XMM, [mem] */
2476 IEM_MC_BEGIN(0, 2);
2477 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2478 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2479 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
2480 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2481 if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2482 {
2483 IEM_MC_LOCAL(uint64_t, u64Tmp);
2484 IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
2485 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);
2486 }
2487 else
2488 {
2489 IEM_MC_LOCAL(uint32_t, u32Tmp);
2490 IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
2491 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);
2492 }
2493 IEM_MC_ADVANCE_RIP();
2494 IEM_MC_END();
2495 }
2496 return VINF_SUCCESS;
2497
2498 case 0: /* MMX */
2499 IEMOP_MNEMONIC("movq/d Pd/q,Ed/q");
2500 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2501 {
2502 /* MMX, greg */
2503 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2504 IEM_MC_BEGIN(0, 1);
2505 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2506 IEM_MC_LOCAL(uint64_t, u64Tmp);
2507 if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2508 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
2509 else
2510 IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
2511 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
2512 IEM_MC_ADVANCE_RIP();
2513 IEM_MC_END();
2514 }
2515 else
2516 {
2517 /* MMX, [mem] */
2518 IEM_MC_BEGIN(0, 2);
2519 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2520 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2521 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
2522 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2523 if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2524 {
2525 IEM_MC_LOCAL(uint64_t, u64Tmp);
2526 IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
2527 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
2528 }
2529 else
2530 {
2531 IEM_MC_LOCAL(uint32_t, u32Tmp);
2532 IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
2533 IEM_MC_STORE_MREG_U32_ZX_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u32Tmp);
2534 }
2535 IEM_MC_ADVANCE_RIP();
2536 IEM_MC_END();
2537 }
2538 return VINF_SUCCESS;
2539
2540 default:
 /* F3/F2 prefixed forms are undefined for this opcode. */
2541 return IEMOP_RAISE_INVALID_OPCODE();
2542 }
2543}
2544
2545
2546/** Opcode 0x0f 0x6f - movq Pq,Qq (MMX), movdqa Vdq,Wdq (0x66), movdqu Vdq,Wdq (0xf3). */
2547FNIEMOP_DEF(iemOp_movq_Pq_Qq__movdqa_Vdq_Wdq__movdqu_Vdq_Wdq)
2548{
2549 bool fAligned = false;
2550 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2551 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
2552 {
2553 case IEM_OP_PRF_SIZE_OP: /* SSE aligned */
2554 fAligned = true;
 /* fall thru - movdqa and movdqu share the code below, differing only in fAligned. */
2555 case IEM_OP_PRF_REPZ: /* SSE unaligned */
2556 if (fAligned)
2557 IEMOP_MNEMONIC("movdqa Vdq,Wdq");
2558 else
2559 IEMOP_MNEMONIC("movdqu Vdq,Wdq");
2560 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2561 {
2562 /*
2563 * Register, register.
2564 */
2565 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2566 IEM_MC_BEGIN(0, 1);
2567 IEM_MC_LOCAL(uint128_t, u128Tmp);
2568 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2569 IEM_MC_FETCH_XREG_U128(u128Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
2570 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u128Tmp);
2571 IEM_MC_ADVANCE_RIP();
2572 IEM_MC_END();
2573 }
2574 else
2575 {
2576 /*
2577 * Register, memory.
2578 */
2579 IEM_MC_BEGIN(0, 2);
2580 IEM_MC_LOCAL(uint128_t, u128Tmp);
2581 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2582
2583 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2584 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2585 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
 /* Only movdqa enforces 16-byte alignment of the memory operand. */
2586 if (fAligned)
2587 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
2588 else
2589 IEM_MC_FETCH_MEM_U128(u128Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
2590 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u128Tmp);
2591
2592 IEM_MC_ADVANCE_RIP();
2593 IEM_MC_END();
2594 }
2595 return VINF_SUCCESS;
2596
2597 case 0: /* MMX */
2598 IEMOP_MNEMONIC("movq Pq,Qq");
2599 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2600 {
2601 /*
2602 * Register, register.
2603 */
2604 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
2605 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
2606 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2607 IEM_MC_BEGIN(0, 1);
2608 IEM_MC_LOCAL(uint64_t, u64Tmp);
2609 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2610 IEM_MC_FETCH_MREG_U64(u64Tmp, bRm & X86_MODRM_RM_MASK);
2611 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
2612 IEM_MC_ADVANCE_RIP();
2613 IEM_MC_END();
2614 }
2615 else
2616 {
2617 /*
2618 * Register, memory.
2619 */
2620 IEM_MC_BEGIN(0, 2);
2621 IEM_MC_LOCAL(uint64_t, u64Tmp);
2622 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2623
2624 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2625 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2626 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2627 IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
2628 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
2629
2630 IEM_MC_ADVANCE_RIP();
2631 IEM_MC_END();
2632 }
2633 return VINF_SUCCESS;
2634
2635 default:
 /* The F2 prefixed form is undefined for this opcode. */
2636 return IEMOP_RAISE_INVALID_OPCODE();
2637 }
2638}
2639
2640
2641/** Opcode 0x0f 0x70. The immediate here is evil!
 * ("Evil" because the imm8 comes after the ModR/M byte, so in the memory
 * forms it must be fetched after the effective address has been decoded.) */
2642FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib__pshufd_Vdq_Wdq_Ib__pshufhw_Vdq_Wdq_Ib__pshuflq_Vdq_Wdq_Ib)
2643{
2644 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
 /* 0x66/F2/F3 prefixes all select an SSE variant; no prefix is pshufw (MMX ext). */
2645 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
2646 {
2647 case IEM_OP_PRF_SIZE_OP: /* SSE */
2648 case IEM_OP_PRF_REPNZ: /* SSE */
2649 case IEM_OP_PRF_REPZ: /* SSE */
2650 {
2651 PFNIEMAIMPLMEDIAPSHUF pfnAImpl;
2652 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
2653 {
2654 case IEM_OP_PRF_SIZE_OP:
2655 IEMOP_MNEMONIC("pshufd Vdq,Wdq,Ib");
2656 pfnAImpl = iemAImpl_pshufd;
2657 break;
2658 case IEM_OP_PRF_REPNZ:
2659 IEMOP_MNEMONIC("pshuflw Vdq,Wdq,Ib");
2660 pfnAImpl = iemAImpl_pshuflw;
2661 break;
2662 case IEM_OP_PRF_REPZ:
2663 IEMOP_MNEMONIC("pshufhw Vdq,Wdq,Ib");
2664 pfnAImpl = iemAImpl_pshufhw;
2665 break;
2666 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2667 }
2668 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2669 {
2670 /*
2671 * Register, register.
2672 */
2673 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2674 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2675
2676 IEM_MC_BEGIN(3, 0);
2677 IEM_MC_ARG(uint128_t *, pDst, 0);
2678 IEM_MC_ARG(uint128_t const *, pSrc, 1);
2679 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2680 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2681 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
2682 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
2683 IEM_MC_CALL_SSE_AIMPL_3(pfnAImpl, pDst, pSrc, bEvilArg);
2684 IEM_MC_ADVANCE_RIP();
2685 IEM_MC_END();
2686 }
2687 else
2688 {
2689 /*
2690 * Register, memory.
2691 */
2692 IEM_MC_BEGIN(3, 2);
2693 IEM_MC_ARG(uint128_t *, pDst, 0);
2694 IEM_MC_LOCAL(uint128_t, uSrc);
2695 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
2696 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2697
 /* Note: the immediate is fetched only after the effective address. */
2698 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2699 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2700 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2701 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2702 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2703
2704 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);
2705 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
2706 IEM_MC_CALL_SSE_AIMPL_3(pfnAImpl, pDst, pSrc, bEvilArg);
2707
2708 IEM_MC_ADVANCE_RIP();
2709 IEM_MC_END();
2710 }
2711 return VINF_SUCCESS;
2712 }
2713
2714 case 0: /* MMX Extension */
2715 IEMOP_MNEMONIC("pshufw Pq,Qq,Ib");
2716 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2717 {
2718 /*
2719 * Register, register.
2720 */
2721 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2722 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2723
2724 IEM_MC_BEGIN(3, 0);
2725 IEM_MC_ARG(uint64_t *, pDst, 0);
2726 IEM_MC_ARG(uint64_t const *, pSrc, 1);
2727 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2728 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
2729 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2730 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
2731 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
2732 IEM_MC_ADVANCE_RIP();
2733 IEM_MC_END();
2734 }
2735 else
2736 {
2737 /*
2738 * Register, memory.
2739 */
2740 IEM_MC_BEGIN(3, 2);
2741 IEM_MC_ARG(uint64_t *, pDst, 0);
2742 IEM_MC_LOCAL(uint64_t, uSrc);
2743 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
2744 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2745
 /* Note: the immediate is fetched only after the effective address. */
2746 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2747 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2748 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2749 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2750 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
2751
2752 IEM_MC_FETCH_MEM_U64(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);
2753 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2754 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
2755
2756 IEM_MC_ADVANCE_RIP();
2757 IEM_MC_END();
2758 }
2759 return VINF_SUCCESS;
2760
2761 default:
 /* Multiple SSE prefixes combined: undefined. */
2762 return IEMOP_RAISE_INVALID_OPCODE();
2763 }
2764}
2765
2766
/* Group 12 workers (shift-by-immediate, word element size) - all still stubs. */
2767/** Opcode 0x0f 0x71 11/2. */
2768FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Nq_Ib, uint8_t, bRm);
2769
2770/** Opcode 0x66 0x0f 0x71 11/2. */
2771FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Udq_Ib, uint8_t, bRm);
2772
2773/** Opcode 0x0f 0x71 11/4. */
2774FNIEMOP_STUB_1(iemOp_Grp12_psraw_Nq_Ib, uint8_t, bRm);
2775
2776/** Opcode 0x66 0x0f 0x71 11/4. */
2777FNIEMOP_STUB_1(iemOp_Grp12_psraw_Udq_Ib, uint8_t, bRm);
2778
2779/** Opcode 0x0f 0x71 11/6. */
2780FNIEMOP_STUB_1(iemOp_Grp12_psllw_Nq_Ib, uint8_t, bRm);
2781
2782/** Opcode 0x66 0x0f 0x71 11/6. */
2783FNIEMOP_STUB_1(iemOp_Grp12_psllw_Udq_Ib, uint8_t, bRm);
2784
2785
2786/** Opcode 0x0f 0x71 - Group 12: dispatch on ModR/M.reg (2=psrlw, 4=psraw, 6=psllw)
 * and on the 0x66 prefix (MMX Nq vs SSE Udq form). Register forms only. */
2787FNIEMOP_DEF(iemOp_Grp12)
2788{
2789 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
 /* Memory forms are undefined for this group. */
2790 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2791 return IEMOP_RAISE_INVALID_OPCODE();
2792 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
2793 {
2794 case 0: case 1: case 3: case 5: case 7:
2795 return IEMOP_RAISE_INVALID_OPCODE();
2796 case 2:
2797 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2798 {
2799 case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psrlw_Nq_Ib, bRm);
2800 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psrlw_Udq_Ib, bRm);
2801 default: return IEMOP_RAISE_INVALID_OPCODE();
2802 }
2803 case 4:
2804 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2805 {
2806 case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psraw_Nq_Ib, bRm);
2807 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psraw_Udq_Ib, bRm);
2808 default: return IEMOP_RAISE_INVALID_OPCODE();
2809 }
2810 case 6:
2811 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2812 {
2813 case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psllw_Nq_Ib, bRm);
2814 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psllw_Udq_Ib, bRm);
2815 default: return IEMOP_RAISE_INVALID_OPCODE();
2816 }
2817 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2818 }
2819}
2820
2821
/* Group 13 workers (shift-by-immediate, doubleword element size) - all still stubs. */
2822/** Opcode 0x0f 0x72 11/2. */
2823FNIEMOP_STUB_1(iemOp_Grp13_psrld_Nq_Ib, uint8_t, bRm);
2824
2825/** Opcode 0x66 0x0f 0x72 11/2. */
2826FNIEMOP_STUB_1(iemOp_Grp13_psrld_Udq_Ib, uint8_t, bRm);
2827
2828/** Opcode 0x0f 0x72 11/4. */
2829FNIEMOP_STUB_1(iemOp_Grp13_psrad_Nq_Ib, uint8_t, bRm);
2830
2831/** Opcode 0x66 0x0f 0x72 11/4. */
2832FNIEMOP_STUB_1(iemOp_Grp13_psrad_Udq_Ib, uint8_t, bRm);
2833
2834/** Opcode 0x0f 0x72 11/6. */
2835FNIEMOP_STUB_1(iemOp_Grp13_pslld_Nq_Ib, uint8_t, bRm);
2836
2837/** Opcode 0x66 0x0f 0x72 11/6. */
2838FNIEMOP_STUB_1(iemOp_Grp13_pslld_Udq_Ib, uint8_t, bRm);
2839
2840
2841/** Opcode 0x0f 0x72 - Group 13: dispatch on ModR/M.reg (2=psrld, 4=psrad, 6=pslld)
 * and on the 0x66 prefix (MMX Nq vs SSE Udq form). Register forms only. */
2842FNIEMOP_DEF(iemOp_Grp13)
2843{
2844 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
 /* Memory forms are undefined for this group. */
2845 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2846 return IEMOP_RAISE_INVALID_OPCODE();
2847 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
2848 {
2849 case 0: case 1: case 3: case 5: case 7:
2850 return IEMOP_RAISE_INVALID_OPCODE();
2851 case 2:
2852 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2853 {
2854 case 0: return FNIEMOP_CALL_1(iemOp_Grp13_psrld_Nq_Ib, bRm);
2855 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_psrld_Udq_Ib, bRm);
2856 default: return IEMOP_RAISE_INVALID_OPCODE();
2857 }
2858 case 4:
2859 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2860 {
2861 case 0: return FNIEMOP_CALL_1(iemOp_Grp13_psrad_Nq_Ib, bRm);
2862 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_psrad_Udq_Ib, bRm);
2863 default: return IEMOP_RAISE_INVALID_OPCODE();
2864 }
2865 case 6:
2866 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2867 {
2868 case 0: return FNIEMOP_CALL_1(iemOp_Grp13_pslld_Nq_Ib, bRm);
2869 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_pslld_Udq_Ib, bRm);
2870 default: return IEMOP_RAISE_INVALID_OPCODE();
2871 }
2872 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2873 }
2874}
2875
2876
/* Group 14 workers (shift-by-immediate, quadword / whole-register) - all still stubs;
   entries tagged //NEXT are the next candidates for implementation. */
2877/** Opcode 0x0f 0x73 11/2. */
2878FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Nq_Ib, uint8_t, bRm);
2879
2880/** Opcode 0x66 0x0f 0x73 11/2. */
2881FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Udq_Ib, uint8_t, bRm);
2882
2883/** Opcode 0x66 0x0f 0x73 11/3. */
2884FNIEMOP_STUB_1(iemOp_Grp14_psrldq_Udq_Ib, uint8_t, bRm); //NEXT
2885
2886/** Opcode 0x0f 0x73 11/6. */
2887FNIEMOP_STUB_1(iemOp_Grp14_psllq_Nq_Ib, uint8_t, bRm);
2888
2889/** Opcode 0x66 0x0f 0x73 11/6. */
2890FNIEMOP_STUB_1(iemOp_Grp14_psllq_Udq_Ib, uint8_t, bRm);
2891
2892/** Opcode 0x66 0x0f 0x73 11/7. */
2893FNIEMOP_STUB_1(iemOp_Grp14_pslldq_Udq_Ib, uint8_t, bRm); //NEXT
2894
2895
2896/** Opcode 0x0f 0x73 - Group 14: dispatch on ModR/M.reg (2=psrlq, 3=psrldq, 6=psllq,
 * 7=pslldq) and on the 0x66 prefix; /3 and /7 exist only in the 0x66 (SSE) form.
 * Register forms only. */
2897FNIEMOP_DEF(iemOp_Grp14)
2898{
2899 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
 /* Memory forms are undefined for this group. */
2900 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2901 return IEMOP_RAISE_INVALID_OPCODE();
2902 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
2903 {
2904 case 0: case 1: case 4: case 5:
2905 return IEMOP_RAISE_INVALID_OPCODE();
2906 case 2:
2907 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2908 {
2909 case 0: return FNIEMOP_CALL_1(iemOp_Grp14_psrlq_Nq_Ib, bRm);
2910 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psrlq_Udq_Ib, bRm);
2911 default: return IEMOP_RAISE_INVALID_OPCODE();
2912 }
2913 case 3:
2914 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2915 {
2916 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psrldq_Udq_Ib, bRm);
2917 default: return IEMOP_RAISE_INVALID_OPCODE();
2918 }
2919 case 6:
2920 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2921 {
2922 case 0: return FNIEMOP_CALL_1(iemOp_Grp14_psllq_Nq_Ib, bRm);
2923 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psllq_Udq_Ib, bRm);
2924 default: return IEMOP_RAISE_INVALID_OPCODE();
2925 }
2926 case 7:
2927 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2928 {
2929 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_pslldq_Udq_Ib, bRm);
2930 default: return IEMOP_RAISE_INVALID_OPCODE();
2931 }
2932 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2933 }
2934}
2935
2936
2937/**
2938 * Common worker for SSE2 and MMX instructions on the forms:
2939 * pxxx mm1, mm2/mem64
2940 * pxxx xmm1, xmm2/mem128
2941 *
2942 * Proper alignment of the 128-bit operand is enforced.
2943 * Exceptions type 4. SSE2 and MMX cpuid checks.
2944 */
2945FNIEMOP_DEF_1(iemOpCommonMmxSse2_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
2946{
2947 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2948 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
2949 {
2950 case IEM_OP_PRF_SIZE_OP: /* SSE */
2951 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2952 {
2953 /*
2954 * Register, register.
2955 */
2956 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2957 IEM_MC_BEGIN(2, 0);
2958 IEM_MC_ARG(uint128_t *, pDst, 0);
2959 IEM_MC_ARG(uint128_t const *, pSrc, 1);
2960 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2961 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
2962 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
2963 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2964 IEM_MC_ADVANCE_RIP();
2965 IEM_MC_END();
2966 }
2967 else
2968 {
2969 /*
2970 * Register, memory.
2971 */
2972 IEM_MC_BEGIN(2, 2);
2973 IEM_MC_ARG(uint128_t *, pDst, 0);
2974 IEM_MC_LOCAL(uint128_t, uSrc);
2975 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
2976 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2977
2978 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2979 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2980 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2981 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);
2982
2983 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
2984 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2985
2986 IEM_MC_ADVANCE_RIP();
2987 IEM_MC_END();
2988 }
2989 return VINF_SUCCESS;
2990
2991 case 0: /* MMX */
2992 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2993 {
2994 /*
2995 * Register, register.
2996 */
2997 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
2998 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
2999 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3000 IEM_MC_BEGIN(2, 0);
3001 IEM_MC_ARG(uint64_t *, pDst, 0);
3002 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3003 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3004 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3005 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3006 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3007 IEM_MC_ADVANCE_RIP();
3008 IEM_MC_END();
3009 }
3010 else
3011 {
3012 /*
3013 * Register, memory.
3014 */
3015 IEM_MC_BEGIN(2, 2);
3016 IEM_MC_ARG(uint64_t *, pDst, 0);
3017 IEM_MC_LOCAL(uint64_t, uSrc);
3018 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3019 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3020
3021 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3022 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3023 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3024 IEM_MC_FETCH_MEM_U64(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);
3025
3026 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3027 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3028
3029 IEM_MC_ADVANCE_RIP();
3030 IEM_MC_END();
3031 }
3032 return VINF_SUCCESS;
3033
3034 default:
3035 return IEMOP_RAISE_INVALID_OPCODE();
3036 }
3037}
3038
3039
/** Opcode 0x0f 0x74 - PCMPEQB: packed byte compare for equality (MMX/SSE2). */
FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq__pcmpeqb_Vdq_Wdq)
{
    IEMOP_MNEMONIC("pcmpeqb");
    /* Defer to the common MMX/SSE2 full,full->full worker with the pcmpeqb impl table. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
}
3046
3047
/** Opcode 0x0f 0x75 - PCMPEQW: packed word compare for equality (MMX/SSE2). */
FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq__pcmpeqw_Vdq_Wdq)
{
    IEMOP_MNEMONIC("pcmpeqw");
    /* Defer to the common MMX/SSE2 full,full->full worker with the pcmpeqw impl table. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
}
3054
3055
/** Opcode 0x0f 0x76 - PCMPEQD: packed dword compare for equality (MMX/SSE2).
 * NOTE(review): the identifier spells 'pcmped' - presumably a typo for
 * 'pcmpeqd'.  Left as-is since the opcode dispatch table references it. */
FNIEMOP_DEF(iemOp_pcmped_Pq_Qq__pcmpeqd_Vdq_Wdq)
{
    IEMOP_MNEMONIC("pcmpeqd");
    /* Defer to the common MMX/SSE2 full,full->full worker with the pcmpeqd impl table. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
}
3062
3063
/** Opcode 0x0f 0x77 - EMMS. Not implemented yet (stub). */
FNIEMOP_STUB(iemOp_emms);
/** Opcode 0x0f 0x78 - VMREAD / AMD Grp17. Decodes as invalid opcode (\#UD) per stub macro. */
FNIEMOP_UD_STUB(iemOp_vmread_AmdGrp17);
/** Opcode 0x0f 0x79 - VMWRITE. Decodes as invalid opcode (\#UD) per stub macro. */
FNIEMOP_UD_STUB(iemOp_vmwrite);
/** Opcode 0x0f 0x7c - HADDPD/HADDPS. Not implemented yet (stub). */
FNIEMOP_STUB(iemOp_haddpd_Vdp_Wpd__haddps_Vps_Wps);
/** Opcode 0x0f 0x7d - HSUBPD/HSUBPS. Not implemented yet (stub). */
FNIEMOP_STUB(iemOp_hsubpd_Vpd_Wpd__hsubps_Vps_Wps);
3074
3075
/** Opcode 0x0f 0x7e - MOVD/MOVQ Ed/q,Pd/q (MMX) and MOVD/MOVQ Ed/q,Vd/q (SSE2).
 *
 * Stores the low 32 bits (or 64 bits with REX.W) of an MMX or XMM register to
 * a general register or memory.  The 0x66 prefix selects the XMM source, no
 * prefix the MMX source; REPZ/REPNZ forms are rejected here (\#UD).
 */
FNIEMOP_DEF(iemOp_movd_q_Ey_Pd__movd_q_Ey_Vy__movq_Vq_Wq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE */
            IEMOP_MNEMONIC("movd/q Ed/q,Wd/q");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* greg, XMM */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    /* REX.W: store the low qword of the XMM register. */
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                    IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Tmp);
                }
                else
                {
                    /* No REX.W: store the low dword of the XMM register. */
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                    IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* [mem], XMM */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                    IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffSrc, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                    IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrEffSrc, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            IEMOP_MNEMONIC("movq/d Ed/q,Pd/q");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* greg, MMX */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    /* REX.W: store the whole 64-bit MMX register. */
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                    IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Tmp);
                }
                else
                {
                    /* No REX.W: store only the low dword. */
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                    IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* [mem], MMX */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                    IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffSrc, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                    IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrEffSrc, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
3182
3183
/** Opcode 0x0f 0x7f - MOVQ Qq,Pq (MMX), MOVDQA Wdq,Vdq (0x66) and
 *  MOVDQU Wdq,Vdq (0xf3).
 *
 * Register/memory store forms.  The aligned variant (MOVDQA) enforces 16-byte
 * alignment of a memory destination; MOVDQU does not.
 */
FNIEMOP_DEF(iemOp_movq_Qq_Pq__movq_movdqa_Wdq_Vdq__movdqu_Wdq_Vdq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    bool fAligned = false;
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE aligned */
            fAligned = true;
            /* fall thru - shares the SSE path below, differing only in fAligned. */
        case IEM_OP_PRF_REPZ: /* SSE unaligned */
            if (fAligned)
                IEMOP_MNEMONIC("movdqa Wdq,Vdq");
            else
                IEMOP_MNEMONIC("movdqu Wdq,Vdq");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint128_t, u128Tmp);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_XREG_U128((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u128Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint128_t, u128Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                if (fAligned)
                    IEM_MC_STORE_MEM_U128_ALIGN_SSE(pIemCpu->iEffSeg, GCPtrEffSrc, u128Tmp);
                else
                    IEM_MC_STORE_MEM_U128(pIemCpu->iEffSeg, GCPtrEffSrc, u128Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            IEMOP_MNEMONIC("movq Qq,Pq");

            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
                /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_STORE_MREG_U64(bRm & X86_MODRM_RM_MASK, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffSrc, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
3278
3279
3280
/** Opcode 0x0f 0x80 - JO Jv: near jump if overflow (OF=1). */
FNIEMOP_DEF(iemOp_jo_Jv)
{
    IEMOP_MNEMONIC("jo Jv");
    IEMOP_HLP_MIN_386();                /* Jcc with rel16/32 appeared with the 386. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* defaults to 64-bit operand size in long mode. */
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: word displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: dword displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3315
3316
/** Opcode 0x0f 0x81 - JNO Jv: near jump if not overflow (OF=0).
 * Note the inverted branch bodies: the IF arm (OF set) falls through. */
FNIEMOP_DEF(iemOp_jno_Jv)
{
    IEMOP_MNEMONIC("jno Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3351
3352
/** Opcode 0x0f 0x82 - JC/JB/JNAE Jv: near jump if carry (CF=1). */
FNIEMOP_DEF(iemOp_jc_Jv)
{
    IEMOP_MNEMONIC("jc/jb/jnae Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3387
3388
/** Opcode 0x0f 0x83 - JNC/JNB/JAE Jv: near jump if not carry (CF=0). */
FNIEMOP_DEF(iemOp_jnc_Jv)
{
    IEMOP_MNEMONIC("jnc/jnb/jae Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3423
3424
/** Opcode 0x0f 0x84 - JE/JZ Jv: near jump if equal/zero (ZF=1). */
FNIEMOP_DEF(iemOp_je_Jv)
{
    IEMOP_MNEMONIC("je/jz Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3459
3460
/** Opcode 0x0f 0x85 - JNE/JNZ Jv: near jump if not equal/not zero (ZF=0). */
FNIEMOP_DEF(iemOp_jne_Jv)
{
    IEMOP_MNEMONIC("jne/jnz Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3495
3496
/** Opcode 0x0f 0x86 - JBE/JNA Jv: near jump if below or equal (CF=1 or ZF=1). */
FNIEMOP_DEF(iemOp_jbe_Jv)
{
    IEMOP_MNEMONIC("jbe/jna Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3531
3532
/** Opcode 0x0f 0x87 - JNBE/JA Jv: near jump if above (CF=0 and ZF=0). */
FNIEMOP_DEF(iemOp_jnbe_Jv)
{
    IEMOP_MNEMONIC("jnbe/ja Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3567
3568
/** Opcode 0x0f 0x88 - JS Jv: near jump if sign (SF=1). */
FNIEMOP_DEF(iemOp_js_Jv)
{
    IEMOP_MNEMONIC("js Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3603
3604
/** Opcode 0x0f 0x89 - JNS Jv: near jump if not sign (SF=0). */
FNIEMOP_DEF(iemOp_jns_Jv)
{
    IEMOP_MNEMONIC("jns Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3639
3640
/** Opcode 0x0f 0x8a - JP/JPE Jv: near jump if parity (PF=1). */
FNIEMOP_DEF(iemOp_jp_Jv)
{
    IEMOP_MNEMONIC("jp Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3675
3676
3677/** Opcode 0x0f 0x8b. */
3678FNIEMOP_DEF(iemOp_jnp_Jv)
3679{
3680 IEMOP_MNEMONIC("jo Jv");
3681 IEMOP_HLP_MIN_386();
3682 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3683 if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
3684 {
3685 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3686 IEMOP_HLP_NO_LOCK_PREFIX();
3687
3688 IEM_MC_BEGIN(0, 0);
3689 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3690 IEM_MC_ADVANCE_RIP();
3691 } IEM_MC_ELSE() {
3692 IEM_MC_REL_JMP_S16(i16Imm);
3693 } IEM_MC_ENDIF();
3694 IEM_MC_END();
3695 }
3696 else
3697 {
3698 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3699 IEMOP_HLP_NO_LOCK_PREFIX();
3700
3701 IEM_MC_BEGIN(0, 0);
3702 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3703 IEM_MC_ADVANCE_RIP();
3704 } IEM_MC_ELSE() {
3705 IEM_MC_REL_JMP_S32(i32Imm);
3706 } IEM_MC_ENDIF();
3707 IEM_MC_END();
3708 }
3709 return VINF_SUCCESS;
3710}
3711
3712
/** Opcode 0x0f 0x8c - JL/JNGE Jv: near jump if less (SF != OF). */
FNIEMOP_DEF(iemOp_jl_Jv)
{
    IEMOP_MNEMONIC("jl/jnge Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3747
3748
/** Opcode 0x0f 0x8d - JNL/JGE Jv: near jump if greater or equal (SF == OF). */
FNIEMOP_DEF(iemOp_jnl_Jv)
{
    IEMOP_MNEMONIC("jnl/jge Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3783
3784
/** Opcode 0x0f 0x8e - JLE/JNG Jv: near jump if less or equal (ZF=1 or SF != OF). */
FNIEMOP_DEF(iemOp_jle_Jv)
{
    IEMOP_MNEMONIC("jle/jng Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3819
3820
/** Opcode 0x0f 0x8f - JNLE/JG Jv: near jump if greater (ZF=0 and SF == OF). */
FNIEMOP_DEF(iemOp_jnle_Jv)
{
    IEMOP_MNEMONIC("jnle/jg Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3855
3856
/** Opcode 0x0f 0x90 - SETO Eb: set byte to 1 if OF=1, else 0. */
FNIEMOP_DEF(iemOp_seto_Eb)
{
    IEMOP_MNEMONIC("seto Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3896
3897
/** Opcode 0x0f 0x91 - SETNO Eb: set byte to 1 if OF=0, else 0 (stores inverted). */
FNIEMOP_DEF(iemOp_setno_Eb)
{
    IEMOP_MNEMONIC("setno Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3937
3938
/** Opcode 0x0f 0x92 - SETC/SETB/SETNAE Eb: set byte to 1 if CF=1, else 0. */
FNIEMOP_DEF(iemOp_setc_Eb)
{
    IEMOP_MNEMONIC("setc Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3978
3979
/** Opcode 0x0f 0x93 - SETNC/SETNB/SETAE Eb: set byte to 1 if CF=0, else 0 (stores inverted). */
FNIEMOP_DEF(iemOp_setnc_Eb)
{
    IEMOP_MNEMONIC("setnc Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4019
4020
/** Opcode 0x0f 0x94 - SETE/SETZ Eb: set byte to 1 if ZF=1, else 0. */
FNIEMOP_DEF(iemOp_sete_Eb)
{
    IEMOP_MNEMONIC("sete Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4060
4061
/** Opcode 0x0f 0x95 - SETNE/SETNZ Eb: set byte to 1 if ZF=0, else 0 (stores inverted). */
FNIEMOP_DEF(iemOp_setne_Eb)
{
    IEMOP_MNEMONIC("setne Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4101
4102
/** Opcode 0x0f 0x96 - SETBE/SETNA Eb: set byte to 1 if CF=1 or ZF=1, else 0. */
FNIEMOP_DEF(iemOp_setbe_Eb)
{
    IEMOP_MNEMONIC("setbe Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4142
4143
/** Opcode 0x0f 0x97. */
FNIEMOP_DEF(iemOp_setnbe_Eb)
{
    /* SETNBE/SETA r/m8: store 1 when both CF and ZF are clear (unsigned above), 0 otherwise.
       Implemented as the inverse of SETBE: test CF|ZF set and store 0 in that branch. */
    IEMOP_MNEMONIC("setnbe Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4183
4184
/** Opcode 0x0f 0x98. */
FNIEMOP_DEF(iemOp_sets_Eb)
{
    /* SETS r/m8: store 1 when SF is set, 0 otherwise. */
    IEMOP_MNEMONIC("sets Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4224
4225
/** Opcode 0x0f 0x99. */
FNIEMOP_DEF(iemOp_setns_Eb)
{
    /* SETNS r/m8: store 1 when SF is clear, 0 otherwise (inverted test: SF set -> store 0). */
    IEMOP_MNEMONIC("setns Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4265
4266
4267/** Opcode 0x0f 0x9a. */
4268FNIEMOP_DEF(iemOp_setp_Eb)
4269{
4270 IEMOP_MNEMONIC("setnp Eb");
4271 IEMOP_HLP_MIN_386();
4272 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4273 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */
4274
4275 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4276 * any way. AMD says it's "unused", whatever that means. We're
4277 * ignoring for now. */
4278 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4279 {
4280 /* register target */
4281 IEM_MC_BEGIN(0, 0);
4282 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4283 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
4284 } IEM_MC_ELSE() {
4285 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
4286 } IEM_MC_ENDIF();
4287 IEM_MC_ADVANCE_RIP();
4288 IEM_MC_END();
4289 }
4290 else
4291 {
4292 /* memory target */
4293 IEM_MC_BEGIN(0, 1);
4294 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4295 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4296 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4297 IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
4298 } IEM_MC_ELSE() {
4299 IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
4300 } IEM_MC_ENDIF();
4301 IEM_MC_ADVANCE_RIP();
4302 IEM_MC_END();
4303 }
4304 return VINF_SUCCESS;
4305}
4306
4307
/** Opcode 0x0f 0x9b. */
FNIEMOP_DEF(iemOp_setnp_Eb)
{
    /* SETNP/SETPO r/m8: store 1 when PF is clear, 0 otherwise (inverted test: PF set -> store 0). */
    IEMOP_MNEMONIC("setnp Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4347
4348
/** Opcode 0x0f 0x9c. */
FNIEMOP_DEF(iemOp_setl_Eb)
{
    /* SETL/SETNGE r/m8: store 1 when SF != OF (signed less-than), 0 otherwise. */
    IEMOP_MNEMONIC("setl Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4388
4389
/** Opcode 0x0f 0x9d. */
FNIEMOP_DEF(iemOp_setnl_Eb)
{
    /* SETNL/SETGE r/m8: store 1 when SF == OF (signed greater-or-equal), 0 otherwise.
       Implemented with the inverted test: SF != OF -> store 0. */
    IEMOP_MNEMONIC("setnl Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4429
4430
/** Opcode 0x0f 0x9e. */
FNIEMOP_DEF(iemOp_setle_Eb)
{
    /* SETLE/SETNG r/m8: store 1 when ZF is set or SF != OF (signed less-or-equal), 0 otherwise. */
    IEMOP_MNEMONIC("setle Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4470
4471
/** Opcode 0x0f 0x9f. */
FNIEMOP_DEF(iemOp_setnle_Eb)
{
    /* SETNLE/SETG r/m8: store 1 when ZF is clear and SF == OF (signed greater-than).
       Implemented with the inverted test: ZF set or SF != OF -> store 0. */
    IEMOP_MNEMONIC("setnle Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4511
4512
/**
 * Common 'push segment-register' helper.
 *
 * ES/CS/SS/DS pushes are invalid in 64-bit mode (IEMOP_HLP_NO_64BIT), while
 * FS/GS are allowed; the default operand size in long mode is 64-bit.
 *
 * @param   iReg    The segment register to push (X86_SREG_XXX).
 */
FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (iReg < X86_SREG_FS)
        IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Value);
            IEM_MC_FETCH_SREG_U16(u16Value, iReg);
            IEM_MC_PUSH_U16(u16Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
            /* Note: dedicated SREG push variant is used here rather than the plain
               IEM_MC_PUSH_U32 - presumably to model how CPUs write segment values
               to a 32-bit stack slot; confirm against the MC implementation. */
            IEM_MC_PUSH_U32_SREG(u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
            IEM_MC_PUSH_U64(u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
4555
4556
/** Opcode 0x0f 0xa0. */
FNIEMOP_DEF(iemOp_push_fs)
{
    /* PUSH FS - defers to the common segment-register push helper. */
    IEMOP_MNEMONIC("push fs");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_NO_LOCK_PREFIX();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
}
4565
4566
/** Opcode 0x0f 0xa1. */
FNIEMOP_DEF(iemOp_pop_fs)
{
    /* POP FS - deferred to a C implementation since segment loads can fault/reload hidden state. */
    IEMOP_MNEMONIC("pop fs");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pIemCpu->enmEffOpSize);
}
4575
4576
/** Opcode 0x0f 0xa2. */
FNIEMOP_DEF(iemOp_cpuid)
{
    /* CPUID - deferred to the C implementation. */
    IEMOP_MNEMONIC("cpuid");
    IEMOP_HLP_MIN_486(); /* not all 486es. */
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
}
4585
4586
4587/**
4588 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
4589 * iemOp_bts_Ev_Gv.
4590 */
4591FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
4592{
4593 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4594 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
4595
4596 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4597 {
4598 /* register destination. */
4599 IEMOP_HLP_NO_LOCK_PREFIX();
4600 switch (pIemCpu->enmEffOpSize)
4601 {
4602 case IEMMODE_16BIT:
4603 IEM_MC_BEGIN(3, 0);
4604 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
4605 IEM_MC_ARG(uint16_t, u16Src, 1);
4606 IEM_MC_ARG(uint32_t *, pEFlags, 2);
4607
4608 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4609 IEM_MC_AND_LOCAL_U16(u16Src, 0xf);
4610 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
4611 IEM_MC_REF_EFLAGS(pEFlags);
4612 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
4613
4614 IEM_MC_ADVANCE_RIP();
4615 IEM_MC_END();
4616 return VINF_SUCCESS;
4617
4618 case IEMMODE_32BIT:
4619 IEM_MC_BEGIN(3, 0);
4620 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
4621 IEM_MC_ARG(uint32_t, u32Src, 1);
4622 IEM_MC_ARG(uint32_t *, pEFlags, 2);
4623
4624 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4625 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
4626 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
4627 IEM_MC_REF_EFLAGS(pEFlags);
4628 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
4629
4630 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
4631 IEM_MC_ADVANCE_RIP();
4632 IEM_MC_END();
4633 return VINF_SUCCESS;
4634
4635 case IEMMODE_64BIT:
4636 IEM_MC_BEGIN(3, 0);
4637 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
4638 IEM_MC_ARG(uint64_t, u64Src, 1);
4639 IEM_MC_ARG(uint32_t *, pEFlags, 2);
4640
4641 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4642 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
4643 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
4644 IEM_MC_REF_EFLAGS(pEFlags);
4645 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
4646
4647 IEM_MC_ADVANCE_RIP();
4648 IEM_MC_END();
4649 return VINF_SUCCESS;
4650
4651 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4652 }
4653 }
4654 else
4655 {
4656 /* memory destination. */
4657
4658 uint32_t fAccess;
4659 if (pImpl->pfnLockedU16)
4660 fAccess = IEM_ACCESS_DATA_RW;
4661 else /* BT */
4662 {
4663 IEMOP_HLP_NO_LOCK_PREFIX();
4664 fAccess = IEM_ACCESS_DATA_R;
4665 }
4666
4667 NOREF(fAccess);
4668
4669 /** @todo test negative bit offsets! */
4670 switch (pIemCpu->enmEffOpSize)
4671 {
4672 case IEMMODE_16BIT:
4673 IEM_MC_BEGIN(3, 2);
4674 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
4675 IEM_MC_ARG(uint16_t, u16Src, 1);
4676 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
4677 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4678 IEM_MC_LOCAL(int16_t, i16AddrAdj);
4679
4680 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4681 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4682 IEM_MC_ASSIGN(i16AddrAdj, u16Src);
4683 IEM_MC_AND_ARG_U16(u16Src, 0x0f);
4684 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
4685 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 1);
4686 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
4687 IEM_MC_FETCH_EFLAGS(EFlags);
4688
4689 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
4690 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
4691 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
4692 else
4693 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
4694 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
4695
4696 IEM_MC_COMMIT_EFLAGS(EFlags);
4697 IEM_MC_ADVANCE_RIP();
4698 IEM_MC_END();
4699 return VINF_SUCCESS;
4700
4701 case IEMMODE_32BIT:
4702 IEM_MC_BEGIN(3, 2);
4703 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
4704 IEM_MC_ARG(uint32_t, u32Src, 1);
4705 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
4706 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4707 IEM_MC_LOCAL(int32_t, i32AddrAdj);
4708
4709 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4710 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4711 IEM_MC_ASSIGN(i32AddrAdj, u32Src);
4712 IEM_MC_AND_ARG_U32(u32Src, 0x1f);
4713 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
4714 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
4715 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
4716 IEM_MC_FETCH_EFLAGS(EFlags);
4717
4718 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
4719 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
4720 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
4721 else
4722 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
4723 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
4724
4725 IEM_MC_COMMIT_EFLAGS(EFlags);
4726 IEM_MC_ADVANCE_RIP();
4727 IEM_MC_END();
4728 return VINF_SUCCESS;
4729
4730 case IEMMODE_64BIT:
4731 IEM_MC_BEGIN(3, 2);
4732 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
4733 IEM_MC_ARG(uint64_t, u64Src, 1);
4734 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
4735 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4736 IEM_MC_LOCAL(int64_t, i64AddrAdj);
4737
4738 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4739 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4740 IEM_MC_ASSIGN(i64AddrAdj, u64Src);
4741 IEM_MC_AND_ARG_U64(u64Src, 0x3f);
4742 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
4743 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
4744 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
4745 IEM_MC_FETCH_EFLAGS(EFlags);
4746
4747 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
4748 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
4749 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
4750 else
4751 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
4752 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
4753
4754 IEM_MC_COMMIT_EFLAGS(EFlags);
4755 IEM_MC_ADVANCE_RIP();
4756 IEM_MC_END();
4757 return VINF_SUCCESS;
4758
4759 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4760 }
4761 }
4762}
4763
4764
4765/** Opcode 0x0f 0xa3. */
4766FNIEMOP_DEF(iemOp_bt_Ev_Gv)
4767{
4768 IEMOP_MNEMONIC("bt Gv,Gv");
4769 IEMOP_HLP_MIN_386();
4770 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
4771}
4772
4773
/**
 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
 *
 * Double-precision shift with an immediate count: Ev is shifted with bits
 * supplied from Gv.  AF and OF are left undefined for verification purposes.
 *
 * @param   pImpl   Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register destination: the shift-count immediate follows ModR/M directly. */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEMOP_HLP_NO_LOCK_PREFIX();

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* The trailing '1' tells the effective-address calculation that one
                   immediate byte still follows; the immediate is fetched right after. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
4918
4919
/**
 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
 *
 * Double-precision shift with the count taken from CL: Ev is shifted with
 * bits supplied from Gv.  AF and OF are left undefined for verification
 * purposes.
 *
 * @param   pImpl   Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register destination; the shift count is read from CL. */
        IEMOP_HLP_NO_LOCK_PREFIX();

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* No immediate follows here (count comes from CL), hence the 0. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
5063
5064
5065
/** Opcode 0x0f 0xa4. */
FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
{
    IEMOP_MNEMONIC("shld Ev,Gv,Ib");
    IEMOP_HLP_MIN_386(); /* SHLD first appeared on the 386. */
    /* Immediate-count variant; shared worker handles decoding of Ib and all operand sizes. */
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shld);
}
5073
5074
/** Opcode 0x0f 0xa5. */
FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
{
    IEMOP_MNEMONIC("shld Ev,Gv,CL");
    IEMOP_HLP_MIN_386(); /* SHLD first appeared on the 386. */
    /* CL-count variant; shared worker fetches the count from CL for all operand sizes. */
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shld);
}
5082
5083
/** Opcode 0x0f 0xa8. */
FNIEMOP_DEF(iemOp_push_gs)
{
    IEMOP_MNEMONIC("push gs");
    IEMOP_HLP_MIN_386(); /* GS only exists on 386+. */
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* Common segment-register push worker does the actual stack write. */
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
}
5092
5093
/** Opcode 0x0f 0xa9. */
FNIEMOP_DEF(iemOp_pop_gs)
{
    IEMOP_MNEMONIC("pop gs");
    IEMOP_HLP_MIN_386(); /* GS only exists on 386+. */
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* Loading a segment register can fault/reschedule, so defer to the C implementation. */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pIemCpu->enmEffOpSize);
}
5102
5103
/** Opcode 0x0f 0xaa. */
/* RSM (resume from system management mode) is not implemented yet - stub raises an assertion/status. */
FNIEMOP_STUB(iemOp_rsm);
//IEMOP_HLP_MIN_386();
5107
5108
/** Opcode 0x0f 0xab. */
FNIEMOP_DEF(iemOp_bts_Ev_Gv)
{
    IEMOP_MNEMONIC("bts Ev,Gv");
    IEMOP_HLP_MIN_386(); /* Bit-test instructions first appeared on the 386. */
    /* Common bit-op worker handles register/memory forms and negative bit offsets. */
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
}
5116
5117
/** Opcode 0x0f 0xac. */
FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
{
    IEMOP_MNEMONIC("shrd Ev,Gv,Ib");
    IEMOP_HLP_MIN_386(); /* SHRD first appeared on the 386. */
    /* Immediate-count variant; shared worker handles decoding of Ib and all operand sizes. */
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shrd);
}
5125
5126
/** Opcode 0x0f 0xad. */
FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
{
    IEMOP_MNEMONIC("shrd Ev,Gv,CL");
    IEMOP_HLP_MIN_386(); /* SHRD first appeared on the 386. */
    /* CL-count variant; shared worker fetches the count from CL for all operand sizes. */
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shrd);
}
5134
5135
/** Opcode 0x0f 0xae mem/0. */
FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fxsave m512");
    /* #UD unless the guest CPU profile advertises FXSAVE/FXRSTOR support. */
    if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fFxSaveRstor)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t,         iEffSeg,                                 0);
    IEM_MC_ARG(RTGCPTR,         GCPtrEff,                                1);
    IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    /* Saving the 512-byte FPU/SSE state image is done by the C implementation. */
    IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
    IEM_MC_END();
    return VINF_SUCCESS;
}
5154
5155
/** Opcode 0x0f 0xae mem/1. */
FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fxrstor m512");
    /* #UD unless the guest CPU profile advertises FXSAVE/FXRSTOR support. */
    if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fFxSaveRstor)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t,         iEffSeg,                                 0);
    IEM_MC_ARG(RTGCPTR,         GCPtrEff,                                1);
    IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    /* Restoring the 512-byte FPU/SSE state image is done by the C implementation. */
    IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
    IEM_MC_END();
    return VINF_SUCCESS;
}
5174
5175
/** Opcode 0x0f 0xae mem/2. */
/* LDMXCSR - not implemented yet (stub asserts). */
FNIEMOP_STUB_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/3. */
/* STMXCSR - not implemented yet (stub asserts). */
FNIEMOP_STUB_1(iemOp_Grp15_stmxcsr, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/4. */
/* XSAVE - raises \#UD (UD-stub). */
FNIEMOP_UD_STUB_1(iemOp_Grp15_xsave,    uint8_t, bRm);

/** Opcode 0x0f 0xae mem/5. */
/* XRSTOR - raises \#UD (UD-stub). */
FNIEMOP_UD_STUB_1(iemOp_Grp15_xrstor,   uint8_t, bRm);

/** Opcode 0x0f 0xae mem/6. */
/* XSAVEOPT - raises \#UD (UD-stub). */
FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/7. */
/* CLFLUSH - not implemented yet (stub asserts). */
FNIEMOP_STUB_1(iemOp_Grp15_clflush, uint8_t, bRm);
5193
5194
/** Opcode 0x0f 0xae 11b/5. */
FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
{
    IEMOP_MNEMONIC("lfence");
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* LFENCE requires SSE2 in the guest CPU profile. */
    if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fSse2)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(0, 0);
    /* Use the real LFENCE when the host supports it; otherwise a generic fence substitute. */
    if (IEM_GET_HOST_CPU_FEATURES(pIemCpu)->fSse2)
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
5212
5213
/** Opcode 0x0f 0xae 11b/6. */
FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
{
    IEMOP_MNEMONIC("mfence");
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* MFENCE requires SSE2 in the guest CPU profile. */
    if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fSse2)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(0, 0);
    /* Use the real MFENCE when the host supports it; otherwise a generic fence substitute. */
    if (IEM_GET_HOST_CPU_FEATURES(pIemCpu)->fSse2)
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
5231
5232
/** Opcode 0x0f 0xae 11b/7. */
FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
{
    IEMOP_MNEMONIC("sfence");
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* NOTE(review): SFENCE was introduced with SSE (not SSE2); gating on fSse2 here may be
       stricter than real hardware - confirm against the CPU feature tables. */
    if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fSse2)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(0, 0);
    /* Use the real SFENCE when the host supports it; otherwise a generic fence substitute. */
    if (IEM_GET_HOST_CPU_FEATURES(pIemCpu)->fSse2)
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
5250
5251
/** Opcode 0xf3 0x0f 0xae 11b/0. */
/* RDFSBASE - raises \#UD (FSGSBASE not implemented). */
FNIEMOP_UD_STUB_1(iemOp_Grp15_rdfsbase, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xae 11b/1. */
/* RDGSBASE - raises \#UD (FSGSBASE not implemented). */
FNIEMOP_UD_STUB_1(iemOp_Grp15_rdgsbase, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xae 11b/2. */
/* WRFSBASE - raises \#UD (FSGSBASE not implemented). */
FNIEMOP_UD_STUB_1(iemOp_Grp15_wrfsbase, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xae 11b/3. */
/* WRGSBASE - raises \#UD (FSGSBASE not implemented). */
FNIEMOP_UD_STUB_1(iemOp_Grp15_wrgsbase, uint8_t, bRm);
5263
5264
/** Opcode 0x0f 0xae. */
/**
 * Group 15 dispatcher.
 *
 * Memory forms (mod != 3) select fxsave/fxrstor/ldmxcsr/stmxcsr/xsave/xrstor/
 * xsaveopt/clflush via the reg field.  Register forms (mod == 3) depend on the
 * prefix bytes: no prefix gives the fence instructions (reg 5-7), F3 gives the
 * FS/GS base read/write instructions (reg 0-3), anything else is \#UD.
 */
FNIEMOP_DEF(iemOp_Grp15)
{
    IEMOP_HLP_MIN_586(); /* Not entirely accurate nor needed, but useful for debugging 286 code. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Memory operand: dispatch purely on the reg field. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_Grp15_fxsave,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_Grp15_fxrstor, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_Grp15_ldmxcsr, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_Grp15_stmxcsr, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_Grp15_xsave,   bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_Grp15_xrstor,  bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_Grp15_xsaveopt,bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_Grp15_clflush, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Register operand: the encoding additionally depends on the repeat/size/lock prefixes. */
        switch (pIemCpu->fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_LOCK))
        {
            case 0: /* no prefixes -> fences; reg 0-4 are undefined here. */
                switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
                {
                    case 0: return IEMOP_RAISE_INVALID_OPCODE();
                    case 1: return IEMOP_RAISE_INVALID_OPCODE();
                    case 2: return IEMOP_RAISE_INVALID_OPCODE();
                    case 3: return IEMOP_RAISE_INVALID_OPCODE();
                    case 4: return IEMOP_RAISE_INVALID_OPCODE();
                    case 5: return FNIEMOP_CALL_1(iemOp_Grp15_lfence, bRm);
                    case 6: return FNIEMOP_CALL_1(iemOp_Grp15_mfence, bRm);
                    case 7: return FNIEMOP_CALL_1(iemOp_Grp15_sfence, bRm);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;

            case IEM_OP_PRF_REPZ: /* F3 prefix -> FSGSBASE instructions; reg 4-7 are undefined. */
                switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
                {
                    case 0: return FNIEMOP_CALL_1(iemOp_Grp15_rdfsbase, bRm);
                    case 1: return FNIEMOP_CALL_1(iemOp_Grp15_rdgsbase, bRm);
                    case 2: return FNIEMOP_CALL_1(iemOp_Grp15_wrfsbase, bRm);
                    case 3: return FNIEMOP_CALL_1(iemOp_Grp15_wrgsbase, bRm);
                    case 4: return IEMOP_RAISE_INVALID_OPCODE();
                    case 5: return IEMOP_RAISE_INVALID_OPCODE();
                    case 6: return IEMOP_RAISE_INVALID_OPCODE();
                    case 7: return IEMOP_RAISE_INVALID_OPCODE();
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;

            default: /* Any other prefix combination is undefined. */
                return IEMOP_RAISE_INVALID_OPCODE();
        }
    }
}
5324
5325
/** Opcode 0x0f 0xaf. */
FNIEMOP_DEF(iemOp_imul_Gv_Ev)
{
    IEMOP_MNEMONIC("imul Gv,Ev");
    IEMOP_HLP_MIN_386(); /* Two-operand IMUL first appeared on the 386. */
    /* SF/ZF/AF/PF are architecturally undefined after IMUL; tell the verifier so. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_imul_two);
}
5334
5335
/** Opcode 0x0f 0xb0. */
/**
 * CMPXCHG Eb,Gb: compare AL with the destination; if equal, store the source
 * byte in the destination, otherwise load the destination into AL.  ZF etc.
 * are produced by the assembly worker.
 */
FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
{
    IEMOP_MNEMONIC("cmpxchg Eb,Gb");
    IEMOP_HLP_MIN_486(); /* CMPXCHG first appeared on the 486. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register destination.  AL is referenced directly so the worker can update it. */
        /* NOTE(review): the locked worker is also selected here when a LOCK prefix is present,
           but LOCK with a register destination should \#UD on real hardware - confirm that
           IEMOP_HLP_DONE_DECODING (or an earlier decode step) rejects it. */
        IEMOP_HLP_DONE_DECODING();
        IEM_MC_BEGIN(4, 0);
        IEM_MC_ARG(uint8_t *,       pu8Dst,                 0);
        IEM_MC_ARG(uint8_t *,       pu8Al,                  1);
        IEM_MC_ARG(uint8_t,         u8Src,                  2);
        IEM_MC_ARG(uint32_t *,      pEFlags,                3);

        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* Memory destination: map it read/write and work on a local AL copy
           that is written back to the register afterwards. */
        IEM_MC_BEGIN(4, 3);
        IEM_MC_ARG(uint8_t *,       pu8Dst,                 0);
        IEM_MC_ARG(uint8_t *,       pu8Al,                  1);
        IEM_MC_ARG(uint8_t,         u8Src,                  2);
        IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        3);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(uint8_t, u8Al);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING();
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_REF_LOCAL(pu8Al, u8Al);
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al); /* write back the possibly-updated AL */
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5394
/** Opcode 0x0f 0xb1. */
/**
 * CMPXCHG Ev,Gv: compare {AX,EAX,RAX} with the destination; if equal, store
 * the source operand, otherwise load the destination into the accumulator.
 * One case per effective operand size, each with a register and a memory form.
 */
FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
{
    IEMOP_MNEMONIC("cmpxchg Ev,Gv");
    IEMOP_HLP_MIN_486(); /* CMPXCHG first appeared on the 486. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register destination: both destination and accumulator are referenced directly. */
        IEMOP_HLP_DONE_DECODING();
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                0);
                IEM_MC_ARG(uint16_t *,      pu16Ax,                 1);
                IEM_MC_ARG(uint16_t,        u16Src,                 2);
                IEM_MC_ARG(uint32_t *,      pEFlags,                3);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                0);
                IEM_MC_ARG(uint32_t *,      pu32Eax,                1);
                IEM_MC_ARG(uint32_t,        u32Src,                 2);
                IEM_MC_ARG(uint32_t *,      pEFlags,                3);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);

                /* 32-bit register writes zero the upper halves in 64-bit mode. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                0);
                IEM_MC_ARG(uint64_t *,      pu64Rax,                1);
#ifdef RT_ARCH_X86
                /* On 32-bit hosts the 64-bit source is passed by reference to the worker. */
                IEM_MC_ARG(uint64_t *,      pu64Src,                2);
#else
                IEM_MC_ARG(uint64_t,        u64Src,                 2);
#endif
                IEM_MC_ARG(uint32_t *,      pEFlags,                3);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
#ifdef RT_ARCH_X86
                IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
#else
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
#endif

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory destination: map it R/W and keep a local accumulator copy
           that is written back to {AX,EAX,RAX} after the operation. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                0);
                IEM_MC_ARG(uint16_t *,      pu16Ax,                 1);
                IEM_MC_ARG(uint16_t,        u16Src,                 2);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint16_t, u16Ax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                0);
                IEM_MC_ARG(uint32_t *,      pu32Eax,                1);
                IEM_MC_ARG(uint32_t,        u32Src,                 2);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint32_t, u32Eax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax); /* also zeroes the upper RAX half */
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                0);
                IEM_MC_ARG(uint64_t *,      pu64Rax,                1);
#ifdef RT_ARCH_X86
                /* On 32-bit hosts the 64-bit source is passed by reference to the worker. */
                IEM_MC_ARG(uint64_t *,      pu64Src,                2);
#else
                IEM_MC_ARG(uint64_t,        u64Src,                 2);
#endif
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint64_t, u64Rax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
#ifdef RT_ARCH_X86
                IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
#else
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
#endif

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
5588
5589
/**
 * Common worker for LSS/LFS/LGS (far-pointer loads): reads the offset part
 * then the selector (at the displacement following the offset) from memory
 * and defers to iemCImpl_load_SReg_Greg to load both the segment register and
 * the general register.  Memory operand only; the caller rejects mod==3.
 *
 * @param   iSegReg     The segment register to load (X86_SREG_XXX).
 * @param   bRm         The ModR/M byte (already fetched).
 */
FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
{
    Assert((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)); /* Caller checks this */
    uint8_t const iGReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg;

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            /* m16:16 far pointer - 2-byte offset followed by 2-byte selector. */
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t,        uSel,                                    0);
            IEM_MC_ARG(uint16_t,        offSeg,                                  1);
            IEM_MC_ARG_CONST(uint8_t,   iSegRegArg,/*=*/iSegReg,                 2);
            IEM_MC_ARG_CONST(uint8_t,   iGRegArg,  /*=*/iGReg,                   3);
            IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16(offSeg, pIemCpu->iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pIemCpu->iEffSeg, GCPtrEff, 2);
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            /* m16:32 far pointer - 4-byte offset followed by 2-byte selector. */
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t,        uSel,                                    0);
            IEM_MC_ARG(uint32_t,        offSeg,                                  1);
            IEM_MC_ARG_CONST(uint8_t,   iSegRegArg,/*=*/iSegReg,                 2);
            IEM_MC_ARG_CONST(uint8_t,   iGRegArg,  /*=*/iGReg,                   3);
            IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32(offSeg, pIemCpu->iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pIemCpu->iEffSeg, GCPtrEff, 4);
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            /* m16:64 far pointer - 8-byte offset followed by 2-byte selector. */
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t,        uSel,                                    0);
            IEM_MC_ARG(uint64_t,        offSeg,                                  1);
            IEM_MC_ARG_CONST(uint8_t,   iSegRegArg,/*=*/iSegReg,                 2);
            IEM_MC_ARG_CONST(uint8_t,   iGRegArg,  /*=*/iGReg,                   3);
            IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            if (IEM_IS_GUEST_CPU_AMD(pIemCpu)) /** @todo testcase: rev 3.15 of the amd manuals claims it only loads a 32-bit greg. */
                IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pIemCpu->iEffSeg, GCPtrEff);
            else
                IEM_MC_FETCH_MEM_U64(offSeg, pIemCpu->iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pIemCpu->iEffSeg, GCPtrEff, 8);
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
5651
5652
/** Opcode 0x0f 0xb2. */
FNIEMOP_DEF(iemOp_lss_Gv_Mp)
{
    IEMOP_MNEMONIC("lss Gv,Mp");
    IEMOP_HLP_MIN_386(); /* LSS first appeared on the 386. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Register operand is invalid - LSS requires a memory far pointer. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE();
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
}
5663
5664
5665/** Opcode 0x0f 0xb3. */
5666FNIEMOP_DEF(iemOp_btr_Ev_Gv)
5667{
5668 IEMOP_MNEMONIC("btr Ev,Gv");
5669 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
5670}
5671
5672
/** Opcode 0x0f 0xb4. */
FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
{
    IEMOP_MNEMONIC("lfs Gv,Mp");
    IEMOP_HLP_MIN_386(); /* LFS (and the FS register) first appeared on the 386. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Register operand is invalid - LFS requires a memory far pointer. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE();
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
}
5683
5684
/** Opcode 0x0f 0xb5. */
FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
{
    IEMOP_MNEMONIC("lgs Gv,Mp");
    IEMOP_HLP_MIN_386(); /* LGS (and the GS register) first appeared on the 386. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Register operand is invalid - LGS requires a memory far pointer. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE();
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
}
5695
5696
/** Opcode 0x0f 0xb6. */
/**
 * MOVZX Gv,Eb: zero-extend a byte from register or memory into a 16/32/64-bit
 * general register, one case per effective operand size.
 */
FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
{
    IEMOP_MNEMONIC("movzx Gv,Eb");
    IEMOP_HLP_MIN_386(); /* MOVZX first appeared on the 386. */

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
5787
5788
/** Opcode 0x0f 0xb7. */
/**
 * MOVZX Gv,Ew: zero-extend a word from register or memory into a 32/64-bit
 * general register.  A 16-bit effective operand size is treated the same as
 * 32-bit (see the todo below about the operand size prefix).
 */
FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
{
    IEMOP_MNEMONIC("movzx Gv,Ew");
    IEMOP_HLP_MIN_386(); /* MOVZX first appeared on the 386. */

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /** @todo Not entirely sure how the operand size prefix is handled here,
     *        assuming that it will be ignored. Would be nice to have a few
     *        test for this. */
    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}
5855
5856
/** Opcode 0x0f 0xb8. */
/* POPCNT (F3-prefixed) / JMPE (IA-64) - not implemented yet (stub asserts). */
FNIEMOP_STUB(iemOp_popcnt_Gv_Ev_jmpe);
5859
5860
/** Opcode 0x0f 0xb9. */
/** Group 10 (UD1) - architecturally reserved, always raises \#UD. */
FNIEMOP_DEF(iemOp_Grp10)
{
    Log(("iemOp_Grp10 -> #UD\n"));
    return IEMOP_RAISE_INVALID_OPCODE();
}
5867
5868
/** Opcode 0x0f 0xba. */
/**
 * Group 8: BT/BTS/BTR/BTC Ev,Ib - bit test instructions with an immediate bit
 * offset.  Reg field selects the operation (0-3 are \#UD); register and memory
 * destination forms are handled separately.
 */
FNIEMOP_DEF(iemOp_Grp8)
{
    IEMOP_HLP_MIN_386(); /* Bit-test instructions first appeared on the 386. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPBINSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: case 1: case 2: case 3:
            return IEMOP_RAISE_INVALID_OPCODE();
        case 4: pImpl = &g_iemAImpl_bt;  IEMOP_MNEMONIC("bt  Ev,Ib"); break;
        case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC("bts Ev,Ib"); break;
        case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC("btr Ev,Ib"); break;
        case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC("btc Ev,Ib"); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    /* Only CF is defined after bit-test instructions. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register destination. */
        /* Bit offset is the immediate modulo the operand width. */
        uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
        IEMOP_HLP_NO_LOCK_PREFIX();

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                    0);
                IEM_MC_ARG_CONST(uint16_t,  u16Src, /*=*/ u8Bit & 0x0f, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                    0);
                IEM_MC_ARG_CONST(uint32_t,  u32Src, /*=*/ u8Bit & 0x1f, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                /* 32-bit register writes zero the upper halves in 64-bit mode. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                    0);
                IEM_MC_ARG_CONST(uint64_t,  u64Src, /*=*/ u8Bit & 0x3f, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory destination. */

        /* BT only reads; BTS/BTR/BTC (which have locked workers) need read/write access. */
        uint32_t fAccess;
        if (pImpl->pfnLockedU16)
            fAccess = IEM_ACCESS_DATA_RW;
        else /* BT */
        {
            IEMOP_HLP_NO_LOCK_PREFIX();
            fAccess = IEM_ACCESS_DATA_R;
        }

        /** @todo test negative bit offsets! */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *,              pu16Dst,                0);
                IEM_MC_ARG(uint16_t,                u16Src,                 1);
                IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags,        2);
                IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);

                /* Reserve 1 byte after the ModR/M bytes for the immediate. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
                IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *,              pu32Dst,                0);
                IEM_MC_ARG(uint32_t,                u32Src,                 1);
                IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags,        2);
                IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);

                /* Reserve 1 byte after the ModR/M bytes for the immediate. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
                IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *,              pu64Dst,                0);
                IEM_MC_ARG(uint64_t,                u64Src,                 1);
                IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags,        2);
                IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);

                /* Reserve 1 byte after the ModR/M bytes for the immediate. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
                IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

}
6031
6032
/** Opcode 0x0f 0xbb. BTC Ev,Gv - bit test and complement.
 *  Decoding and emulation is delegated to the common Ev,Gv bit-op worker,
 *  parameterized with the BTC assembly implementation table. */
FNIEMOP_DEF(iemOp_btc_Ev_Gv)
{
    IEMOP_MNEMONIC("btc Ev,Gv");
    IEMOP_HLP_MIN_386();  /* Instruction requires a 386 or later CPU. */
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
}
6040
6041
/** Opcode 0x0f 0xbc. BSF Gv,Ev - bit scan forward.
 *  Delegates to the common Gv,Ev binary-operator worker with the BSF
 *  implementation table; OF/SF/AF/PF/CF are architecturally undefined
 *  after BSF, so they're excluded from verification. */
FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
{
    IEMOP_MNEMONIC("bsf Gv,Ev");
    IEMOP_HLP_MIN_386();  /* Instruction requires a 386 or later CPU. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsf);
}
6050
6051
/** Opcode 0x0f 0xbd. BSR Gv,Ev - bit scan reverse.
 *  Same decode path as BSF above, only with the BSR implementation table;
 *  the non-ZF flags are likewise undefined and excluded from verification. */
FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
{
    IEMOP_MNEMONIC("bsr Gv,Ev");
    IEMOP_HLP_MIN_386();  /* Instruction requires a 386 or later CPU. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsr);
}
6060
6061
/** Opcode 0x0f 0xbe. MOVSX Gv,Eb - sign-extend a byte into a 16/32/64-bit
 *  general register.  Handles both the register and the memory source form,
 *  once per effective operand size. */
FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
{
    IEMOP_MNEMONIC("movsx Gv,Eb");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register source: fetch the byte GREG sign-extended to the effective
           operand size and store it into the destination GREG. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        /* Memory source: calculate the effective address, fetch the byte
           sign-extended, and store it into the destination GREG. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6152
6153
/** Opcode 0x0f 0xbf. MOVSX Gv,Ew - sign-extend a word into a 32/64-bit
 *  general register.  The 16-bit operand-size variant is folded into the
 *  32-bit path (see the todo below). */
FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
{
    IEMOP_MNEMONIC("movsx Gv,Ew");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /** @todo Not entirely sure how the operand size prefix is handled here,
     *        assuming that it will be ignored. Would be nice to have a few
     *        test for this. */
    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register source: only two real cases - 32-bit (covers 16-bit too)
           and 64-bit destinations. */
        if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}
6220
6221
/** Opcode 0x0f 0xc0. XADD Eb,Gb - exchange and add, byte variant.
 *  The memory form supports the LOCK prefix and writes the original
 *  destination value back into the source register after the op. */
FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_MIN_486();  /* XADD was introduced with the 486. */
    IEMOP_MNEMONIC("xadd Eb,Gb");

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX();  /* LOCK is only valid with a memory destination. */

        /* Register form: the helper exchanges and adds through the two
           register references directly. */
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0);
        IEM_MC_ARG(uint8_t *,  pu8Reg,  1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        /* Falls through to the common return below. */
    }
    else
    {
        /*
         * We're accessing memory.
         */
        /* Map the destination R/W, run the (possibly locked) helper on a
           local copy of the source register, then commit memory, flags and
           the updated register copy in that order. */
        IEM_MC_BEGIN(3, 3);
        IEM_MC_ARG(uint8_t *,  pu8Dst,          0);
        IEM_MC_ARG(uint8_t *,  pu8Reg,          1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(uint8_t,  u8RegCopy);
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U8(u8RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
        IEM_MC_FETCH_EFLAGS(EFlags);
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u8RegCopy);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }
    return VINF_SUCCESS;
}
6280
6281
/** Opcode 0x0f 0xc1. XADD Ev,Gv - exchange and add, 16/32/64-bit variant.
 *  Same structure as the byte variant above, expanded per effective
 *  operand size; the memory form supports the LOCK prefix. */
FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
{
    IEMOP_MNEMONIC("xadd Ev,Gv");
    IEMOP_HLP_MIN_486();  /* XADD was introduced with the 486. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX();  /* LOCK is only valid with a memory destination. */

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst,  0);
                IEM_MC_ARG(uint16_t *, pu16Reg,  1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst,  0);
                IEM_MC_ARG(uint32_t *, pu32Reg,  1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);

                /* 32-bit GREG writes zero the upper halves of both 64-bit registers. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst,  0);
                IEM_MC_ARG(uint64_t *, pu64Reg,  1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
        /* Map the destination R/W, run the (possibly locked) helper on a
           local copy of the source register, then commit memory, flags and
           the updated register copy in that order. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint16_t *, pu16Dst,          0);
                IEM_MC_ARG(uint16_t *, pu16Reg,          1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16RegCopy);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U16(u16RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint32_t *, pu32Dst,          0);
                IEM_MC_ARG(uint32_t *, pu32Reg,          1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32RegCopy);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U32(u32RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint64_t *, pu64Dst,          0);
                IEM_MC_ARG(uint64_t *, pu64Reg,          1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64RegCopy);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U64(u64RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6434
/** Opcode 0x0f 0xc2. Decoder stub - CMPPS/CMPPD/CMPSS/CMPSD not implemented yet. */
FNIEMOP_STUB(iemOp_cmpps_Vps_Wps_Ib__cmppd_Vpd_Wpd_Ib__cmpss_Vss_Wss_Ib__cmpsd_Vsd_Wsd_Ib);
6437
6438
/** Opcode 0x0f 0xc3. MOVNTI My,Gy - non-temporal store of a general register.
 *  The full implementation below is currently compiled out (see the #if 0);
 *  the stub variant is used instead. */
#if 0 //ndef VBOX_WITH_REM
FNIEMOP_DEF(iemOp_movnti_My_Gy)
{
    IEMOP_MNEMONIC("mov Ev,Gv"); /* NOTE(review): mnemonic string looks wrong for MOVNTI - confirm before enabling. */

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /* Only the register -> memory form makes sense, assuming #UD for the other form. */
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                /* MOVNTI requires SSE2; raise #UD otherwise. */
                if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fSse2)
                    return IEMOP_RAISE_INVALID_OPCODE();

                IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrEffDst, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                /* MOVNTI requires SSE2; raise #UD otherwise. */
                if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fSse2)
                    return IEMOP_RAISE_INVALID_OPCODE();

                IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffDst, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_16BIT:
                /** @todo check this form.   */
                return IEMOP_RAISE_INVALID_OPCODE();
        }
    }
    else
        return IEMOP_RAISE_INVALID_OPCODE();
    return VINF_SUCCESS;
}
#else
FNIEMOP_STUB(iemOp_movnti_My_Gy); // solaris 10 uses this in hat_pte_zero().
#endif
6496
6497
/** Opcode 0x0f 0xc4. Decoder stub - PINSRW not implemented yet. */
FNIEMOP_STUB(iemOp_pinsrw_Pq_Ry_Mw_Ib__pinsrw_Vdq_Ry_Mw_Ib);

/** Opcode 0x0f 0xc5. Decoder stub - PEXTRW not implemented yet. */
FNIEMOP_STUB(iemOp_pextrw_Gd_Nq_Ib__pextrw_Gd_Udq_Ib);

/** Opcode 0x0f 0xc6. Decoder stub - SHUFPS/SHUFPD not implemented yet. */
FNIEMOP_STUB(iemOp_shufps_Vps_Wps_Ib__shufdp_Vpd_Wpd_Ib);
6506
6507
/** Opcode 0x0f 0xc7 !11/1. CMPXCHG8B Mq - compare EDX:EAX with the 64-bit
 *  memory operand; on match store ECX:EBX, otherwise load the memory value
 *  into EDX:EAX.  Supports the LOCK prefix via the _locked helper. */
FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
{
    IEMOP_MNEMONIC("cmpxchg8b Mq");

    IEM_MC_BEGIN(4, 3);
    IEM_MC_ARG(uint64_t *, pu64MemDst,     0);
    IEM_MC_ARG(PRTUINT64U, pu64EaxEdx,     1);
    IEM_MC_ARG(PRTUINT64U, pu64EbxEcx,     2);
    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
    IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
    IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);

    /* Assemble the EDX:EAX comparand from the two 32-bit registers. */
    IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
    IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
    IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);

    /* Assemble the ECX:EBX replacement value likewise. */
    IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
    IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
    IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);

    IEM_MC_FETCH_EFLAGS(EFlags);
    if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
    else
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);

    IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
    IEM_MC_COMMIT_EFLAGS(EFlags);
    /* On mismatch (ZF clear) the helper left the memory value in the
       comparand locals; write them back to EAX/EDX. */
    IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
        /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */
        IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
        IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
6552
6553
/** Opcode REX.W 0x0f 0xc7 !11/1. UD stub - CMPXCHG16B not implemented yet. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm);

/** Opcode 0x0f 0xc7 11/6. UD stub - RDRAND not implemented yet. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm);

/** Opcode 0x0f 0xc7 !11/6. UD stub - VMPTRLD not implemented yet. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);

/** Opcode 0x66 0x0f 0xc7 !11/6. UD stub - VMCLEAR not implemented yet. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xc7 !11/6. UD stub - VMXON not implemented yet. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);

/** Opcode [0xf3] 0x0f 0xc7 !11/7. UD stub - VMPTRST not implemented yet. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
6571
6572
6573/** Opcode 0x0f 0xc7. */
6574FNIEMOP_DEF(iemOp_Grp9)
6575{
6576 /** @todo Testcase: Check mixing 0x66 and 0xf3. Check the effect of 0xf2. */
6577 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6578 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
6579 {
6580 case 0: case 2: case 3: case 4: case 5:
6581 return IEMOP_RAISE_INVALID_OPCODE();
6582 case 1:
6583 /** @todo Testcase: Check prefix effects on cmpxchg8b/16b. */
6584 if ( (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)
6585 || (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))) /** @todo Testcase: AMD seems to express a different idea here wrt prefixes. */
6586 return IEMOP_RAISE_INVALID_OPCODE();
6587 if (bRm & IEM_OP_PRF_SIZE_REX_W)
6588 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
6589 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
6590 case 6:
6591 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6592 return FNIEMOP_CALL_1(iemOp_Grp9_rdrand_Rv, bRm);
6593 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))
6594 {
6595 case 0:
6596 return FNIEMOP_CALL_1(iemOp_Grp9_vmptrld_Mq, bRm);
6597 case IEM_OP_PRF_SIZE_OP:
6598 return FNIEMOP_CALL_1(iemOp_Grp9_vmclear_Mq, bRm);
6599 case IEM_OP_PRF_REPZ:
6600 return FNIEMOP_CALL_1(iemOp_Grp9_vmxon_Mq, bRm);
6601 default:
6602 return IEMOP_RAISE_INVALID_OPCODE();
6603 }
6604 case 7:
6605 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))
6606 {
6607 case 0:
6608 case IEM_OP_PRF_REPZ:
6609 return FNIEMOP_CALL_1(iemOp_Grp9_vmptrst_Mq, bRm);
6610 default:
6611 return IEMOP_RAISE_INVALID_OPCODE();
6612 }
6613 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6614 }
6615}
6616
6617
6618/**
6619 * Common 'bswap register' helper.
6620 */
6621FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
6622{
6623 IEMOP_HLP_NO_LOCK_PREFIX();
6624 switch (pIemCpu->enmEffOpSize)
6625 {
6626 case IEMMODE_16BIT:
6627 IEM_MC_BEGIN(1, 0);
6628 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6629 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
6630 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
6631 IEM_MC_ADVANCE_RIP();
6632 IEM_MC_END();
6633 return VINF_SUCCESS;
6634
6635 case IEMMODE_32BIT:
6636 IEM_MC_BEGIN(1, 0);
6637 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6638 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
6639 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6640 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
6641 IEM_MC_ADVANCE_RIP();
6642 IEM_MC_END();
6643 return VINF_SUCCESS;
6644
6645 case IEMMODE_64BIT:
6646 IEM_MC_BEGIN(1, 0);
6647 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6648 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
6649 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
6650 IEM_MC_ADVANCE_RIP();
6651 IEM_MC_END();
6652 return VINF_SUCCESS;
6653
6654 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6655 }
6656}
6657
6658
/** Opcode 0x0f 0xc8. BSWAP rAX/r8 - delegates to the common bswap helper. */
FNIEMOP_DEF(iemOp_bswap_rAX_r8)
{
    IEMOP_MNEMONIC("bswap rAX/r8");
    /* Note! Intel manuals states that R8-R15 can be accessed by using a REX.X
             prefix.  REX.B is the correct prefix it appears.  For a parallel
             case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
    IEMOP_HLP_MIN_486();  /* BSWAP was introduced with the 486. */
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pIemCpu->uRexB);
}
6669
6670
/** Opcode 0x0f 0xc9. BSWAP rCX/r9 - see iemOp_bswap_rAX_r8 for the REX.B note. */
FNIEMOP_DEF(iemOp_bswap_rCX_r9)
{
    IEMOP_MNEMONIC("bswap rCX/r9");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pIemCpu->uRexB);
}
6678
6679
6680/** Opcode 0x0f 0xca. */
6681FNIEMOP_DEF(iemOp_bswap_rDX_r10)
6682{
6683 IEMOP_MNEMONIC("bswap rDX/r9");
6684 IEMOP_HLP_MIN_486();
6685 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pIemCpu->uRexB);
6686}
6687
6688
6689/** Opcode 0x0f 0xcb. */
6690FNIEMOP_DEF(iemOp_bswap_rBX_r11)
6691{
6692 IEMOP_MNEMONIC("bswap rBX/r9");
6693 IEMOP_HLP_MIN_486();
6694 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pIemCpu->uRexB);
6695}
6696
6697
/** Opcode 0x0f 0xcc. BSWAP rSP/r12 - see iemOp_bswap_rAX_r8 for the REX.B note. */
FNIEMOP_DEF(iemOp_bswap_rSP_r12)
{
    IEMOP_MNEMONIC("bswap rSP/r12");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pIemCpu->uRexB);
}
6705
6706
/** Opcode 0x0f 0xcd. BSWAP rBP/r13 - see iemOp_bswap_rAX_r8 for the REX.B note. */
FNIEMOP_DEF(iemOp_bswap_rBP_r13)
{
    IEMOP_MNEMONIC("bswap rBP/r13");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pIemCpu->uRexB);
}
6714
6715
/** Opcode 0x0f 0xce. BSWAP rSI/r14 - see iemOp_bswap_rAX_r8 for the REX.B note. */
FNIEMOP_DEF(iemOp_bswap_rSI_r14)
{
    IEMOP_MNEMONIC("bswap rSI/r14");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pIemCpu->uRexB);
}
6723
6724
/** Opcode 0x0f 0xcf. BSWAP rDI/r15 - see iemOp_bswap_rAX_r8 for the REX.B note. */
FNIEMOP_DEF(iemOp_bswap_rDI_r15)
{
    IEMOP_MNEMONIC("bswap rDI/r15");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pIemCpu->uRexB);
}
6732
6733
6734
/* Decoder stubs for 0x0f 0xd0 - 0xd6: not implemented yet. */
/** Opcode 0x0f 0xd0. */
FNIEMOP_STUB(iemOp_addsubpd_Vpd_Wpd__addsubps_Vps_Wps);
/** Opcode 0x0f 0xd1. */
FNIEMOP_STUB(iemOp_psrlw_Pp_Qp__psrlw_Vdp_Wdq);
/** Opcode 0x0f 0xd2. */
FNIEMOP_STUB(iemOp_psrld_Pq_Qq__psrld_Vdq_Wdq);
/** Opcode 0x0f 0xd3. */
FNIEMOP_STUB(iemOp_psrlq_Pq_Qq__psrlq_Vdq_Wdq);
/** Opcode 0x0f 0xd4. */
FNIEMOP_STUB(iemOp_paddq_Pq_Qq__paddq_Vdq_Wdq);
/** Opcode 0x0f 0xd5. */
FNIEMOP_STUB(iemOp_pmulq_Pq_Qq__pmullw_Vdq_Wdq);
/** Opcode 0x0f 0xd6. */
FNIEMOP_STUB(iemOp_movq_Wq_Vq__movq2dq_Vdq_Nq__movdq2q_Pq_Uq);
6749
6750
6751/** Opcode 0x0f 0xd7. */
6752FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq__pmovmskb_Gd_Udq)
6753{
6754 /* Docs says register only. */
6755 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6756 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
6757 return IEMOP_RAISE_INVALID_OPCODE();
6758
6759 /* Note! Taking the lazy approch here wrt the high 32-bits of the GREG. */
6760 /** @todo testcase: Check that the instruction implicitly clears the high
6761 * bits in 64-bit mode. The REX.W is first necessary when VLMAX > 256
6762 * and opcode modifications are made to work with the whole width (not
6763 * just 128). */
6764 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
6765 {
6766 case IEM_OP_PRF_SIZE_OP: /* SSE */
6767 IEMOP_MNEMONIC("pmovmskb Gd,Nq");
6768 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
6769 IEM_MC_BEGIN(2, 0);
6770 IEM_MC_ARG(uint64_t *, pDst, 0);
6771 IEM_MC_ARG(uint128_t const *, pSrc, 1);
6772 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6773 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
6774 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
6775 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
6776 IEM_MC_ADVANCE_RIP();
6777 IEM_MC_END();
6778 return VINF_SUCCESS;
6779
6780 case 0: /* MMX */
6781 IEMOP_MNEMONIC("pmovmskb Gd,Udq");
6782 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
6783 IEM_MC_BEGIN(2, 0);
6784 IEM_MC_ARG(uint64_t *, pDst, 0);
6785 IEM_MC_ARG(uint64_t const *, pSrc, 1);
6786 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
6787 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
6788 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
6789 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
6790 IEM_MC_ADVANCE_RIP();
6791 IEM_MC_END();
6792 return VINF_SUCCESS;
6793
6794 default:
6795 return IEMOP_RAISE_INVALID_OPCODE();
6796 }
6797}
6798
6799
/* Decoder stubs for 0x0f 0xd8 - 0xe6: not implemented yet. */
/** Opcode 0x0f 0xd8. */
FNIEMOP_STUB(iemOp_psubusb_Pq_Qq__psubusb_Vdq_Wdq);
/** Opcode 0x0f 0xd9. */
FNIEMOP_STUB(iemOp_psubusw_Pq_Qq__psubusw_Vdq_Wdq);
/** Opcode 0x0f 0xda. */
FNIEMOP_STUB(iemOp_pminub_Pq_Qq__pminub_Vdq_Wdq);
/** Opcode 0x0f 0xdb. */
FNIEMOP_STUB(iemOp_pand_Pq_Qq__pand_Vdq_Wdq);
/** Opcode 0x0f 0xdc. */
FNIEMOP_STUB(iemOp_paddusb_Pq_Qq__paddusb_Vdq_Wdq);
/** Opcode 0x0f 0xdd. */
FNIEMOP_STUB(iemOp_paddusw_Pq_Qq__paddusw_Vdq_Wdq);
/** Opcode 0x0f 0xde. */
FNIEMOP_STUB(iemOp_pmaxub_Pq_Qq__pamxub_Vdq_Wdq);
/** Opcode 0x0f 0xdf. */
FNIEMOP_STUB(iemOp_pandn_Pq_Qq__pandn_Vdq_Wdq);
/** Opcode 0x0f 0xe0. */
FNIEMOP_STUB(iemOp_pavgb_Pq_Qq__pavgb_Vdq_Wdq);
/** Opcode 0x0f 0xe1. */
FNIEMOP_STUB(iemOp_psraw_Pq_Qq__psraw_Vdq_Wdq);
/** Opcode 0x0f 0xe2. */
FNIEMOP_STUB(iemOp_psrad_Pq_Qq__psrad_Vdq_Wdq);
/** Opcode 0x0f 0xe3. */
FNIEMOP_STUB(iemOp_pavgw_Pq_Qq__pavgw_Vdq_Wdq);
/** Opcode 0x0f 0xe4. */
FNIEMOP_STUB(iemOp_pmulhuw_Pq_Qq__pmulhuw_Vdq_Wdq);
/** Opcode 0x0f 0xe5. */
FNIEMOP_STUB(iemOp_pmulhw_Pq_Qq__pmulhw_Vdq_Wdq);
/** Opcode 0x0f 0xe6. */
FNIEMOP_STUB(iemOp_cvttpd2dq_Vdq_Wdp__cvtdq2pd_Vdq_Wpd__cvtpd2dq_Vdq_Wpd);
6830
6831
/** Opcode 0x0f 0xe7. MOVNTQ Mq,Pq / MOVNTDQ Mdq,Vdq - non-temporal store of
 *  an MMX (plain) or XMM (0x66 prefix) register to memory.  The full
 *  implementation below is currently compiled out (see the #if 0); the stub
 *  variant is used instead. */
#if 0 //ndef VBOX_WITH_REM
FNIEMOP_DEF(iemOp_movntq_Mq_Pq__movntdq_Mdq_Vdq)
{
    IEMOP_MNEMONIC(!(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP) ? "movntq mr,r" : "movntdq mr,r");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, memory.
         */
/** @todo check when the REPNZ/Z bits kick in. Same as lock, probably... */
        switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
        {

            case IEM_OP_PRF_SIZE_OP: /* SSE */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint128_t,                 uSrc);
                IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();

                IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_MEM_U128_ALIGN_SSE(pIemCpu->iEffSeg, GCPtrEffSrc, uSrc);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case 0: /* MMX */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t,                  uSrc);
                IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();

                IEM_MC_FETCH_MREG_U64(uSrc, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffSrc, uSrc);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            default:
                return IEMOP_RAISE_INVALID_OPCODE();
        }
    }
    /* The register, register encoding is invalid. */
    else
        return IEMOP_RAISE_INVALID_OPCODE();
    return VINF_SUCCESS;
}
#else
FNIEMOP_STUB(iemOp_movntq_Mq_Pq__movntdq_Mdq_Vdq);
#endif
6891
6892
/* Decoder stubs for 0x0f 0xe8 - 0xee: not implemented yet. */
/** Opcode 0x0f 0xe8. */
FNIEMOP_STUB(iemOp_psubsb_Pq_Qq__psubsb_Vdq_Wdq);
/** Opcode 0x0f 0xe9. */
FNIEMOP_STUB(iemOp_psubsw_Pq_Qq__psubsw_Vdq_Wdq);
/** Opcode 0x0f 0xea. */
FNIEMOP_STUB(iemOp_pminsw_Pq_Qq__pminsw_Vdq_Wdq);
/** Opcode 0x0f 0xeb. */
FNIEMOP_STUB(iemOp_por_Pq_Qq__por_Vdq_Wdq);
/** Opcode 0x0f 0xec. */
FNIEMOP_STUB(iemOp_paddsb_Pq_Qq__paddsb_Vdq_Wdq);
/** Opcode 0x0f 0xed. */
FNIEMOP_STUB(iemOp_paddsw_Pq_Qq__paddsw_Vdq_Wdq);
/** Opcode 0x0f 0xee. */
FNIEMOP_STUB(iemOp_pmaxsw_Pq_Qq__pmaxsw_Vdq_Wdq);
6907
6908
/** Opcode 0x0f 0xef. PXOR - delegated to the common MMX/SSE2 full-register
 *  binary-op worker with the PXOR implementation table. */
FNIEMOP_DEF(iemOp_pxor_Pq_Qq__pxor_Vdq_Wdq)
{
    IEMOP_MNEMONIC("pxor");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pxor);
}
6915
6916
6917/** Opcode 0x0f 0xf0. */
6918FNIEMOP_STUB(iemOp_lddqu_Vdq_Mdq);
6919/** Opcode 0x0f 0xf1. */
6920FNIEMOP_STUB(iemOp_psllw_Pq_Qq__pslw_Vdq_Wdq);
6921/** Opcode 0x0f 0xf2. */
6922FNIEMOP_STUB(iemOp_psld_Pq_Qq__pslld_Vdq_Wdq);
6923/** Opcode 0x0f 0xf3. */
6924FNIEMOP_STUB(iemOp_psllq_Pq_Qq__pslq_Vdq_Wdq);
6925/** Opcode 0x0f 0xf4. */
6926FNIEMOP_STUB(iemOp_pmuludq_Pq_Qq__pmuludq_Vdq_Wdq);
6927/** Opcode 0x0f 0xf5. */
6928FNIEMOP_STUB(iemOp_pmaddwd_Pq_Qq__pmaddwd_Vdq_Wdq);
6929/** Opcode 0x0f 0xf6. */
6930FNIEMOP_STUB(iemOp_psadbw_Pq_Qq__psadbw_Vdq_Wdq);
6931/** Opcode 0x0f 0xf7. */
6932FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq__maskmovdqu_Vdq_Udq);
6933/** Opcode 0x0f 0xf8. */
6934FNIEMOP_STUB(iemOp_psubb_Pq_Qq_psubb_Vdq_Wdq); //NEXT
6935/** Opcode 0x0f 0xf9. */
6936FNIEMOP_STUB(iemOp_psubw_Pq_Qq__psubw_Vdq_Wdq);
6937/** Opcode 0x0f 0xfa. */
6938FNIEMOP_STUB(iemOp_psubd_Pq_Qq__psubd_Vdq_Wdq);
6939/** Opcode 0x0f 0xfb. */
6940FNIEMOP_STUB(iemOp_psubq_Pq_Qq__psbuq_Vdq_Wdq);
6941/** Opcode 0x0f 0xfc. */
6942FNIEMOP_STUB(iemOp_paddb_Pq_Qq__paddb_Vdq_Wdq);
6943/** Opcode 0x0f 0xfd. */
6944FNIEMOP_STUB(iemOp_paddw_Pq_Qq__paddw_Vdq_Wdq);
6945/** Opcode 0x0f 0xfe. */
6946FNIEMOP_STUB(iemOp_paddd_Pq_Qq__paddd_Vdq_Wdq);
6947
6948
/**
 * The two-byte opcode map (opcodes prefixed by 0x0f).
 *
 * Indexed by the second opcode byte; operand-size (0x66) and repeat (0xf2/0xf3)
 * prefix variants are dispatched inside the individual handlers.
 */
const PFNIEMOP g_apfnTwoByteMap[256] =
{
    /* 0x00 */ iemOp_Grp6,
    /* 0x01 */ iemOp_Grp7,
    /* 0x02 */ iemOp_lar_Gv_Ew,
    /* 0x03 */ iemOp_lsl_Gv_Ew,
    /* 0x04 */ iemOp_Invalid,
    /* 0x05 */ iemOp_syscall,
    /* 0x06 */ iemOp_clts,
    /* 0x07 */ iemOp_sysret,
    /* 0x08 */ iemOp_invd,
    /* 0x09 */ iemOp_wbinvd,
    /* 0x0a */ iemOp_Invalid,
    /* 0x0b */ iemOp_ud2,
    /* 0x0c */ iemOp_Invalid,
    /* 0x0d */ iemOp_nop_Ev_GrpP,
    /* 0x0e */ iemOp_femms,
    /* 0x0f */ iemOp_3Dnow,
    /* 0x10 */ iemOp_movups_Vps_Wps__movupd_Vpd_Wpd__movss_Vss_Wss__movsd_Vsd_Wsd,
    /* 0x11 */ iemOp_movups_Wps_Vps__movupd_Wpd_Vpd__movss_Wss_Vss__movsd_Vsd_Wsd,
    /* 0x12 */ iemOp_movlps_Vq_Mq__movhlps_Vq_Uq__movlpd_Vq_Mq__movsldup_Vq_Wq__movddup_Vq_Wq,
    /* 0x13 */ iemOp_movlps_Mq_Vq__movlpd_Mq_Vq,
    /* 0x14 */ iemOp_unpckhlps_Vps_Wq__unpcklpd_Vpd_Wq,
    /* 0x15 */ iemOp_unpckhps_Vps_Wq__unpckhpd_Vpd_Wq,
    /* 0x16 */ iemOp_movhps_Vq_Mq__movlhps_Vq_Uq__movhpd_Vq_Mq__movshdup_Vq_Wq,
    /* 0x17 */ iemOp_movhps_Mq_Vq__movhpd_Mq_Vq,
    /* 0x18 */ iemOp_prefetch_Grp16,
    /* 0x19 */ iemOp_nop_Ev,
    /* 0x1a */ iemOp_nop_Ev,
    /* 0x1b */ iemOp_nop_Ev,
    /* 0x1c */ iemOp_nop_Ev,
    /* 0x1d */ iemOp_nop_Ev,
    /* 0x1e */ iemOp_nop_Ev,
    /* 0x1f */ iemOp_nop_Ev,
    /* 0x20 */ iemOp_mov_Rd_Cd,
    /* 0x21 */ iemOp_mov_Rd_Dd,
    /* 0x22 */ iemOp_mov_Cd_Rd,
    /* 0x23 */ iemOp_mov_Dd_Rd,
    /* 0x24 */ iemOp_mov_Rd_Td,
    /* 0x25 */ iemOp_Invalid,
    /* 0x26 */ iemOp_mov_Td_Rd,
    /* 0x27 */ iemOp_Invalid,
    /* 0x28 */ iemOp_movaps_Vps_Wps__movapd_Vpd_Wpd,
    /* 0x29 */ iemOp_movaps_Wps_Vps__movapd_Wpd_Vpd,
    /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi__cvtpi2pd_Vpd_Qpi__cvtsi2ss_Vss_Ey__cvtsi2sd_Vsd_Ey,
    /* 0x2b */ iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd,
    /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps__cvttpd2pi_Ppi_Wpd__cvttss2si_Gy_Wss__cvttsd2si_Yu_Wsd,
    /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps__cvtpd2pi_QpiWpd__cvtss2si_Gy_Wss__cvtsd2si_Gy_Wsd,
    /* 0x2e */ iemOp_ucomiss_Vss_Wss__ucomisd_Vsd_Wsd,
    /* 0x2f */ iemOp_comiss_Vss_Wss__comisd_Vsd_Wsd,
    /* 0x30 */ iemOp_wrmsr,
    /* 0x31 */ iemOp_rdtsc,
    /* 0x32 */ iemOp_rdmsr,
    /* 0x33 */ iemOp_rdpmc,
    /* 0x34 */ iemOp_sysenter,
    /* 0x35 */ iemOp_sysexit,
    /* 0x36 */ iemOp_Invalid,
    /* 0x37 */ iemOp_getsec,
    /* 0x38 */ iemOp_3byte_Esc_A4,
    /* 0x39 */ iemOp_Invalid,
    /* 0x3a */ iemOp_3byte_Esc_A5,
    /* 0x3b */ iemOp_Invalid,
    /* 0x3c */ iemOp_Invalid,
    /* 0x3d */ iemOp_Invalid,
    /* 0x3e */ iemOp_Invalid,
    /* 0x3f */ iemOp_Invalid,
    /* 0x40 */ iemOp_cmovo_Gv_Ev,
    /* 0x41 */ iemOp_cmovno_Gv_Ev,
    /* 0x42 */ iemOp_cmovc_Gv_Ev,
    /* 0x43 */ iemOp_cmovnc_Gv_Ev,
    /* 0x44 */ iemOp_cmove_Gv_Ev,
    /* 0x45 */ iemOp_cmovne_Gv_Ev,
    /* 0x46 */ iemOp_cmovbe_Gv_Ev,
    /* 0x47 */ iemOp_cmovnbe_Gv_Ev,
    /* 0x48 */ iemOp_cmovs_Gv_Ev,
    /* 0x49 */ iemOp_cmovns_Gv_Ev,
    /* 0x4a */ iemOp_cmovp_Gv_Ev,
    /* 0x4b */ iemOp_cmovnp_Gv_Ev,
    /* 0x4c */ iemOp_cmovl_Gv_Ev,
    /* 0x4d */ iemOp_cmovnl_Gv_Ev,
    /* 0x4e */ iemOp_cmovle_Gv_Ev,
    /* 0x4f */ iemOp_cmovnle_Gv_Ev,
    /* 0x50 */ iemOp_movmskps_Gy_Ups__movmskpd_Gy_Upd,
    /* 0x51 */ iemOp_sqrtps_Wps_Vps__sqrtpd_Wpd_Vpd__sqrtss_Vss_Wss__sqrtsd_Vsd_Wsd,
    /* 0x52 */ iemOp_rsqrtps_Wps_Vps__rsqrtss_Vss_Wss,
    /* 0x53 */ iemOp_rcpps_Wps_Vps__rcpss_Vs_Wss,
    /* 0x54 */ iemOp_andps_Vps_Wps__andpd_Wpd_Vpd,
    /* 0x55 */ iemOp_andnps_Vps_Wps__andnpd_Wpd_Vpd,
    /* 0x56 */ iemOp_orps_Wpd_Vpd__orpd_Wpd_Vpd,
    /* 0x57 */ iemOp_xorps_Vps_Wps__xorpd_Wpd_Vpd,
    /* 0x58 */ iemOp_addps_Vps_Wps__addpd_Vpd_Wpd__addss_Vss_Wss__addsd_Vsd_Wsd,
    /* 0x59 */ iemOp_mulps_Vps_Wps__mulpd_Vpd_Wpd__mulss_Vss__Wss__mulsd_Vsd_Wsd,
    /* 0x5a */ iemOp_cvtps2pd_Vpd_Wps__cvtpd2ps_Vps_Wpd__cvtss2sd_Vsd_Wss__cvtsd2ss_Vss_Wsd,
    /* 0x5b */ iemOp_cvtdq2ps_Vps_Wdq__cvtps2dq_Vdq_Wps__cvtps2dq_Vdq_Wps,
    /* 0x5c */ iemOp_subps_Vps_Wps__subpd_Vps_Wdp__subss_Vss_Wss__subsd_Vsd_Wsd,
    /* 0x5d */ iemOp_minps_Vps_Wps__minpd_Vpd_Wpd__minss_Vss_Wss__minsd_Vsd_Wsd,
    /* 0x5e */ iemOp_divps_Vps_Wps__divpd_Vpd_Wpd__divss_Vss_Wss__divsd_Vsd_Wsd,
    /* 0x5f */ iemOp_maxps_Vps_Wps__maxpd_Vpd_Wpd__maxss_Vss_Wss__maxsd_Vsd_Wsd,
    /* 0x60 */ iemOp_punpcklbw_Pq_Qd__punpcklbw_Vdq_Wdq,
    /* 0x61 */ iemOp_punpcklwd_Pq_Qd__punpcklwd_Vdq_Wdq,
    /* 0x62 */ iemOp_punpckldq_Pq_Qd__punpckldq_Vdq_Wdq,
    /* 0x63 */ iemOp_packsswb_Pq_Qq__packsswb_Vdq_Wdq,
    /* 0x64 */ iemOp_pcmpgtb_Pq_Qq__pcmpgtb_Vdq_Wdq,
    /* 0x65 */ iemOp_pcmpgtw_Pq_Qq__pcmpgtw_Vdq_Wdq,
    /* 0x66 */ iemOp_pcmpgtd_Pq_Qq__pcmpgtd_Vdq_Wdq,
    /* 0x67 */ iemOp_packuswb_Pq_Qq__packuswb_Vdq_Wdq,
    /* 0x68 */ iemOp_punpckhbw_Pq_Qq__punpckhbw_Vdq_Wdq,
    /* 0x69 */ iemOp_punpckhwd_Pq_Qd__punpckhwd_Vdq_Wdq,
    /* 0x6a */ iemOp_punpckhdq_Pq_Qd__punpckhdq_Vdq_Wdq,
    /* 0x6b */ iemOp_packssdw_Pq_Qd__packssdq_Vdq_Wdq,
    /* 0x6c */ iemOp_punpcklqdq_Vdq_Wdq,
    /* 0x6d */ iemOp_punpckhqdq_Vdq_Wdq,
    /* 0x6e */ iemOp_movd_q_Pd_Ey__movd_q_Vy_Ey,
    /* 0x6f */ iemOp_movq_Pq_Qq__movdqa_Vdq_Wdq__movdqu_Vdq_Wdq,
    /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib__pshufd_Vdq_Wdq_Ib__pshufhw_Vdq_Wdq_Ib__pshuflq_Vdq_Wdq_Ib,
    /* 0x71 */ iemOp_Grp12,
    /* 0x72 */ iemOp_Grp13,
    /* 0x73 */ iemOp_Grp14,
    /* 0x74 */ iemOp_pcmpeqb_Pq_Qq__pcmpeqb_Vdq_Wdq,
    /* 0x75 */ iemOp_pcmpeqw_Pq_Qq__pcmpeqw_Vdq_Wdq,
    /* 0x76 */ iemOp_pcmped_Pq_Qq__pcmpeqd_Vdq_Wdq,
    /* 0x77 */ iemOp_emms,
    /* 0x78 */ iemOp_vmread_AmdGrp17,
    /* 0x79 */ iemOp_vmwrite,
    /* 0x7a */ iemOp_Invalid,
    /* 0x7b */ iemOp_Invalid,
    /* 0x7c */ iemOp_haddpd_Vdp_Wpd__haddps_Vps_Wps,
    /* 0x7d */ iemOp_hsubpd_Vpd_Wpd__hsubps_Vps_Wps,
    /* 0x7e */ iemOp_movd_q_Ey_Pd__movd_q_Ey_Vy__movq_Vq_Wq,
    /* 0x7f */ iemOp_movq_Qq_Pq__movq_movdqa_Wdq_Vdq__movdqu_Wdq_Vdq,
    /* 0x80 */ iemOp_jo_Jv,
    /* 0x81 */ iemOp_jno_Jv,
    /* 0x82 */ iemOp_jc_Jv,
    /* 0x83 */ iemOp_jnc_Jv,
    /* 0x84 */ iemOp_je_Jv,
    /* 0x85 */ iemOp_jne_Jv,
    /* 0x86 */ iemOp_jbe_Jv,
    /* 0x87 */ iemOp_jnbe_Jv,
    /* 0x88 */ iemOp_js_Jv,
    /* 0x89 */ iemOp_jns_Jv,
    /* 0x8a */ iemOp_jp_Jv,
    /* 0x8b */ iemOp_jnp_Jv,
    /* 0x8c */ iemOp_jl_Jv,
    /* 0x8d */ iemOp_jnl_Jv,
    /* 0x8e */ iemOp_jle_Jv,
    /* 0x8f */ iemOp_jnle_Jv,
    /* 0x90 */ iemOp_seto_Eb,
    /* 0x91 */ iemOp_setno_Eb,
    /* 0x92 */ iemOp_setc_Eb,
    /* 0x93 */ iemOp_setnc_Eb,
    /* 0x94 */ iemOp_sete_Eb,
    /* 0x95 */ iemOp_setne_Eb,
    /* 0x96 */ iemOp_setbe_Eb,
    /* 0x97 */ iemOp_setnbe_Eb,
    /* 0x98 */ iemOp_sets_Eb,
    /* 0x99 */ iemOp_setns_Eb,
    /* 0x9a */ iemOp_setp_Eb,
    /* 0x9b */ iemOp_setnp_Eb,
    /* 0x9c */ iemOp_setl_Eb,
    /* 0x9d */ iemOp_setnl_Eb,
    /* 0x9e */ iemOp_setle_Eb,
    /* 0x9f */ iemOp_setnle_Eb,
    /* 0xa0 */ iemOp_push_fs,
    /* 0xa1 */ iemOp_pop_fs,
    /* 0xa2 */ iemOp_cpuid,
    /* 0xa3 */ iemOp_bt_Ev_Gv,
    /* 0xa4 */ iemOp_shld_Ev_Gv_Ib,
    /* 0xa5 */ iemOp_shld_Ev_Gv_CL,
    /* 0xa6 */ iemOp_Invalid,
    /* 0xa7 */ iemOp_Invalid,
    /* 0xa8 */ iemOp_push_gs,
    /* 0xa9 */ iemOp_pop_gs,
    /* 0xaa */ iemOp_rsm,
    /* 0xab */ iemOp_bts_Ev_Gv,
    /* 0xac */ iemOp_shrd_Ev_Gv_Ib,
    /* 0xad */ iemOp_shrd_Ev_Gv_CL,
    /* 0xae */ iemOp_Grp15,
    /* 0xaf */ iemOp_imul_Gv_Ev,
    /* 0xb0 */ iemOp_cmpxchg_Eb_Gb,
    /* 0xb1 */ iemOp_cmpxchg_Ev_Gv,
    /* 0xb2 */ iemOp_lss_Gv_Mp,
    /* 0xb3 */ iemOp_btr_Ev_Gv,
    /* 0xb4 */ iemOp_lfs_Gv_Mp,
    /* 0xb5 */ iemOp_lgs_Gv_Mp,
    /* 0xb6 */ iemOp_movzx_Gv_Eb,
    /* 0xb7 */ iemOp_movzx_Gv_Ew,
    /* 0xb8 */ iemOp_popcnt_Gv_Ev_jmpe,
    /* 0xb9 */ iemOp_Grp10,
    /* 0xba */ iemOp_Grp8,
    /* 0xbb */ iemOp_btc_Ev_Gv, /* was mislabeled 0xbd; BTC is opcode 0f bb */
    /* 0xbc */ iemOp_bsf_Gv_Ev,
    /* 0xbd */ iemOp_bsr_Gv_Ev,
    /* 0xbe */ iemOp_movsx_Gv_Eb,
    /* 0xbf */ iemOp_movsx_Gv_Ew,
    /* 0xc0 */ iemOp_xadd_Eb_Gb,
    /* 0xc1 */ iemOp_xadd_Ev_Gv,
    /* 0xc2 */ iemOp_cmpps_Vps_Wps_Ib__cmppd_Vpd_Wpd_Ib__cmpss_Vss_Wss_Ib__cmpsd_Vsd_Wsd_Ib,
    /* 0xc3 */ iemOp_movnti_My_Gy,
    /* 0xc4 */ iemOp_pinsrw_Pq_Ry_Mw_Ib__pinsrw_Vdq_Ry_Mw_Ib,
    /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib__pextrw_Gd_Udq_Ib,
    /* 0xc6 */ iemOp_shufps_Vps_Wps_Ib__shufdp_Vpd_Wpd_Ib,
    /* 0xc7 */ iemOp_Grp9,
    /* 0xc8 */ iemOp_bswap_rAX_r8,
    /* 0xc9 */ iemOp_bswap_rCX_r9,
    /* 0xca */ iemOp_bswap_rDX_r10,
    /* 0xcb */ iemOp_bswap_rBX_r11,
    /* 0xcc */ iemOp_bswap_rSP_r12,
    /* 0xcd */ iemOp_bswap_rBP_r13,
    /* 0xce */ iemOp_bswap_rSI_r14,
    /* 0xcf */ iemOp_bswap_rDI_r15,
    /* 0xd0 */ iemOp_addsubpd_Vpd_Wpd__addsubps_Vps_Wps,
    /* 0xd1 */ iemOp_psrlw_Pp_Qp__psrlw_Vdp_Wdq,
    /* 0xd2 */ iemOp_psrld_Pq_Qq__psrld_Vdq_Wdq,
    /* 0xd3 */ iemOp_psrlq_Pq_Qq__psrlq_Vdq_Wdq,
    /* 0xd4 */ iemOp_paddq_Pq_Qq__paddq_Vdq_Wdq,
    /* 0xd5 */ iemOp_pmulq_Pq_Qq__pmullw_Vdq_Wdq,
    /* 0xd6 */ iemOp_movq_Wq_Vq__movq2dq_Vdq_Nq__movdq2q_Pq_Uq,
    /* 0xd7 */ iemOp_pmovmskb_Gd_Nq__pmovmskb_Gd_Udq,
    /* 0xd8 */ iemOp_psubusb_Pq_Qq__psubusb_Vdq_Wdq,
    /* 0xd9 */ iemOp_psubusw_Pq_Qq__psubusw_Vdq_Wdq,
    /* 0xda */ iemOp_pminub_Pq_Qq__pminub_Vdq_Wdq,
    /* 0xdb */ iemOp_pand_Pq_Qq__pand_Vdq_Wdq,
    /* 0xdc */ iemOp_paddusb_Pq_Qq__paddusb_Vdq_Wdq,
    /* 0xdd */ iemOp_paddusw_Pq_Qq__paddusw_Vdq_Wdq,
    /* 0xde */ iemOp_pmaxub_Pq_Qq__pamxub_Vdq_Wdq,
    /* 0xdf */ iemOp_pandn_Pq_Qq__pandn_Vdq_Wdq,
    /* 0xe0 */ iemOp_pavgb_Pq_Qq__pavgb_Vdq_Wdq,
    /* 0xe1 */ iemOp_psraw_Pq_Qq__psraw_Vdq_Wdq,
    /* 0xe2 */ iemOp_psrad_Pq_Qq__psrad_Vdq_Wdq,
    /* 0xe3 */ iemOp_pavgw_Pq_Qq__pavgw_Vdq_Wdq,
    /* 0xe4 */ iemOp_pmulhuw_Pq_Qq__pmulhuw_Vdq_Wdq,
    /* 0xe5 */ iemOp_pmulhw_Pq_Qq__pmulhw_Vdq_Wdq,
    /* 0xe6 */ iemOp_cvttpd2dq_Vdq_Wdp__cvtdq2pd_Vdq_Wpd__cvtpd2dq_Vdq_Wpd,
    /* 0xe7 */ iemOp_movntq_Mq_Pq__movntdq_Mdq_Vdq,
    /* 0xe8 */ iemOp_psubsb_Pq_Qq__psubsb_Vdq_Wdq,
    /* 0xe9 */ iemOp_psubsw_Pq_Qq__psubsw_Vdq_Wdq,
    /* 0xea */ iemOp_pminsw_Pq_Qq__pminsw_Vdq_Wdq,
    /* 0xeb */ iemOp_por_Pq_Qq__por_Vdq_Wdq,
    /* 0xec */ iemOp_paddsb_Pq_Qq__paddsb_Vdq_Wdq,
    /* 0xed */ iemOp_paddsw_Pq_Qq__paddsw_Vdq_Wdq,
    /* 0xee */ iemOp_pmaxsw_Pq_Qq__pmaxsw_Vdq_Wdq,
    /* 0xef */ iemOp_pxor_Pq_Qq__pxor_Vdq_Wdq,
    /* 0xf0 */ iemOp_lddqu_Vdq_Mdq,
    /* 0xf1 */ iemOp_psllw_Pq_Qq__pslw_Vdq_Wdq,
    /* 0xf2 */ iemOp_psld_Pq_Qq__pslld_Vdq_Wdq,
    /* 0xf3 */ iemOp_psllq_Pq_Qq__pslq_Vdq_Wdq,
    /* 0xf4 */ iemOp_pmuludq_Pq_Qq__pmuludq_Vdq_Wdq,
    /* 0xf5 */ iemOp_pmaddwd_Pq_Qq__pmaddwd_Vdq_Wdq,
    /* 0xf6 */ iemOp_psadbw_Pq_Qq__psadbw_Vdq_Wdq,
    /* 0xf7 */ iemOp_maskmovq_Pq_Nq__maskmovdqu_Vdq_Udq,
    /* 0xf8 */ iemOp_psubb_Pq_Qq_psubb_Vdq_Wdq,
    /* 0xf9 */ iemOp_psubw_Pq_Qq__psubw_Vdq_Wdq,
    /* 0xfa */ iemOp_psubd_Pq_Qq__psubd_Vdq_Wdq,
    /* 0xfb */ iemOp_psubq_Pq_Qq__psbuq_Vdq_Wdq,
    /* 0xfc */ iemOp_paddb_Pq_Qq__paddb_Vdq_Wdq,
    /* 0xfd */ iemOp_paddw_Pq_Qq__paddw_Vdq_Wdq,
    /* 0xfe */ iemOp_paddd_Pq_Qq__paddd_Vdq_Wdq,
    /* 0xff */ iemOp_Invalid
};
7208
7209/** @} */
7210
7211
7212/** @name One byte opcodes.
7213 *
7214 * @{
7215 */
7216
/** Opcode 0x00. */
FNIEMOP_DEF(iemOp_add_Eb_Gb)
{
    /* ADD r/m8,r8 - decoded and emitted by the common byte-sized
       memory/register-destination binary-operator helper. */
    IEMOP_MNEMONIC("add Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_add);
}
7223
7224
7225/** Opcode 0x01. */
7226FNIEMOP_DEF(iemOp_add_Ev_Gv)
7227{
7228 IEMOP_MNEMONIC("add Ev,Gv");
7229 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_add);
7230}
7231
7232
7233/** Opcode 0x02. */
7234FNIEMOP_DEF(iemOp_add_Gb_Eb)
7235{
7236 IEMOP_MNEMONIC("add Gb,Eb");
7237 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_add);
7238}
7239
7240
7241/** Opcode 0x03. */
7242FNIEMOP_DEF(iemOp_add_Gv_Ev)
7243{
7244 IEMOP_MNEMONIC("add Gv,Ev");
7245 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_add);
7246}
7247
7248
7249/** Opcode 0x04. */
7250FNIEMOP_DEF(iemOp_add_Al_Ib)
7251{
7252 IEMOP_MNEMONIC("add al,Ib");
7253 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_add);
7254}
7255
7256
7257/** Opcode 0x05. */
7258FNIEMOP_DEF(iemOp_add_eAX_Iz)
7259{
7260 IEMOP_MNEMONIC("add rAX,Iz");
7261 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_add);
7262}
7263
7264
7265/** Opcode 0x06. */
7266FNIEMOP_DEF(iemOp_push_ES)
7267{
7268 IEMOP_MNEMONIC("push es");
7269 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_ES);
7270}
7271
7272
/** Opcode 0x07. */
FNIEMOP_DEF(iemOp_pop_ES)
{
    IEMOP_MNEMONIC("pop es");
    IEMOP_HLP_NO_64BIT();        /* POP ES is invalid in 64-bit mode. */
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* Segment register loads involve descriptor access, so defer to the
       C implementation with the current effective operand size. */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_ES, pIemCpu->enmEffOpSize);
}
7281
7282
7283/** Opcode 0x08. */
7284FNIEMOP_DEF(iemOp_or_Eb_Gb)
7285{
7286 IEMOP_MNEMONIC("or Eb,Gb");
7287 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7288 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_or);
7289}
7290
7291
7292/** Opcode 0x09. */
7293FNIEMOP_DEF(iemOp_or_Ev_Gv)
7294{
7295 IEMOP_MNEMONIC("or Ev,Gv ");
7296 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7297 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_or);
7298}
7299
7300
7301/** Opcode 0x0a. */
7302FNIEMOP_DEF(iemOp_or_Gb_Eb)
7303{
7304 IEMOP_MNEMONIC("or Gb,Eb");
7305 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7306 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_or);
7307}
7308
7309
7310/** Opcode 0x0b. */
7311FNIEMOP_DEF(iemOp_or_Gv_Ev)
7312{
7313 IEMOP_MNEMONIC("or Gv,Ev");
7314 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7315 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_or);
7316}
7317
7318
7319/** Opcode 0x0c. */
7320FNIEMOP_DEF(iemOp_or_Al_Ib)
7321{
7322 IEMOP_MNEMONIC("or al,Ib");
7323 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7324 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_or);
7325}
7326
7327
7328/** Opcode 0x0d. */
7329FNIEMOP_DEF(iemOp_or_eAX_Iz)
7330{
7331 IEMOP_MNEMONIC("or rAX,Iz");
7332 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7333 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_or);
7334}
7335
7336
7337/** Opcode 0x0e. */
7338FNIEMOP_DEF(iemOp_push_CS)
7339{
7340 IEMOP_MNEMONIC("push cs");
7341 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_CS);
7342}
7343
7344
/** Opcode 0x0f. */
FNIEMOP_DEF(iemOp_2byteEscape)
{
    /* Two-byte opcode escape: fetch the second opcode byte and dispatch
       through the two-byte opcode table. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    /** @todo PUSH CS on 8086, undefined on 80186. */
    IEMOP_HLP_MIN_286();    /* 0x0f as an escape byte requires a 286+. */
    return FNIEMOP_CALL(g_apfnTwoByteMap[b]);
}
7353
7354/** Opcode 0x10. */
7355FNIEMOP_DEF(iemOp_adc_Eb_Gb)
7356{
7357 IEMOP_MNEMONIC("adc Eb,Gb");
7358 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_adc);
7359}
7360
7361
7362/** Opcode 0x11. */
7363FNIEMOP_DEF(iemOp_adc_Ev_Gv)
7364{
7365 IEMOP_MNEMONIC("adc Ev,Gv");
7366 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_adc);
7367}
7368
7369
7370/** Opcode 0x12. */
7371FNIEMOP_DEF(iemOp_adc_Gb_Eb)
7372{
7373 IEMOP_MNEMONIC("adc Gb,Eb");
7374 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_adc);
7375}
7376
7377
7378/** Opcode 0x13. */
7379FNIEMOP_DEF(iemOp_adc_Gv_Ev)
7380{
7381 IEMOP_MNEMONIC("adc Gv,Ev");
7382 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_adc);
7383}
7384
7385
7386/** Opcode 0x14. */
7387FNIEMOP_DEF(iemOp_adc_Al_Ib)
7388{
7389 IEMOP_MNEMONIC("adc al,Ib");
7390 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_adc);
7391}
7392
7393
7394/** Opcode 0x15. */
7395FNIEMOP_DEF(iemOp_adc_eAX_Iz)
7396{
7397 IEMOP_MNEMONIC("adc rAX,Iz");
7398 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_adc);
7399}
7400
7401
7402/** Opcode 0x16. */
7403FNIEMOP_DEF(iemOp_push_SS)
7404{
7405 IEMOP_MNEMONIC("push ss");
7406 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_SS);
7407}
7408
7409
7410/** Opcode 0x17. */
7411FNIEMOP_DEF(iemOp_pop_SS)
7412{
7413 IEMOP_MNEMONIC("pop ss"); /** @todo implies instruction fusing? */
7414 IEMOP_HLP_NO_LOCK_PREFIX();
7415 IEMOP_HLP_NO_64BIT();
7416 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_SS, pIemCpu->enmEffOpSize);
7417}
7418
7419
7420/** Opcode 0x18. */
7421FNIEMOP_DEF(iemOp_sbb_Eb_Gb)
7422{
7423 IEMOP_MNEMONIC("sbb Eb,Gb");
7424 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sbb);
7425}
7426
7427
7428/** Opcode 0x19. */
7429FNIEMOP_DEF(iemOp_sbb_Ev_Gv)
7430{
7431 IEMOP_MNEMONIC("sbb Ev,Gv");
7432 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sbb);
7433}
7434
7435
7436/** Opcode 0x1a. */
7437FNIEMOP_DEF(iemOp_sbb_Gb_Eb)
7438{
7439 IEMOP_MNEMONIC("sbb Gb,Eb");
7440 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sbb);
7441}
7442
7443
7444/** Opcode 0x1b. */
7445FNIEMOP_DEF(iemOp_sbb_Gv_Ev)
7446{
7447 IEMOP_MNEMONIC("sbb Gv,Ev");
7448 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sbb);
7449}
7450
7451
7452/** Opcode 0x1c. */
7453FNIEMOP_DEF(iemOp_sbb_Al_Ib)
7454{
7455 IEMOP_MNEMONIC("sbb al,Ib");
7456 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sbb);
7457}
7458
7459
7460/** Opcode 0x1d. */
7461FNIEMOP_DEF(iemOp_sbb_eAX_Iz)
7462{
7463 IEMOP_MNEMONIC("sbb rAX,Iz");
7464 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sbb);
7465}
7466
7467
7468/** Opcode 0x1e. */
7469FNIEMOP_DEF(iemOp_push_DS)
7470{
7471 IEMOP_MNEMONIC("push ds");
7472 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_DS);
7473}
7474
7475
7476/** Opcode 0x1f. */
7477FNIEMOP_DEF(iemOp_pop_DS)
7478{
7479 IEMOP_MNEMONIC("pop ds");
7480 IEMOP_HLP_NO_LOCK_PREFIX();
7481 IEMOP_HLP_NO_64BIT();
7482 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_DS, pIemCpu->enmEffOpSize);
7483}
7484
7485
7486/** Opcode 0x20. */
7487FNIEMOP_DEF(iemOp_and_Eb_Gb)
7488{
7489 IEMOP_MNEMONIC("and Eb,Gb");
7490 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7491 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_and);
7492}
7493
7494
7495/** Opcode 0x21. */
7496FNIEMOP_DEF(iemOp_and_Ev_Gv)
7497{
7498 IEMOP_MNEMONIC("and Ev,Gv");
7499 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7500 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_and);
7501}
7502
7503
7504/** Opcode 0x22. */
7505FNIEMOP_DEF(iemOp_and_Gb_Eb)
7506{
7507 IEMOP_MNEMONIC("and Gb,Eb");
7508 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7509 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_and);
7510}
7511
7512
7513/** Opcode 0x23. */
7514FNIEMOP_DEF(iemOp_and_Gv_Ev)
7515{
7516 IEMOP_MNEMONIC("and Gv,Ev");
7517 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7518 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_and);
7519}
7520
7521
7522/** Opcode 0x24. */
7523FNIEMOP_DEF(iemOp_and_Al_Ib)
7524{
7525 IEMOP_MNEMONIC("and al,Ib");
7526 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7527 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_and);
7528}
7529
7530
7531/** Opcode 0x25. */
7532FNIEMOP_DEF(iemOp_and_eAX_Iz)
7533{
7534 IEMOP_MNEMONIC("and rAX,Iz");
7535 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7536 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_and);
7537}
7538
7539
/** Opcode 0x26. */
FNIEMOP_DEF(iemOp_seg_ES)
{
    /* ES segment-override prefix: record it, set the effective segment,
       then continue decoding with the next opcode byte.  Any REX prefix
       seen before this byte is cleared (REX must immediately precede the
       opcode). */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg es");
    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_ES;
    pIemCpu->iEffSeg    = X86_SREG_ES;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
7550
7551
/** Opcode 0x27. */
FNIEMOP_DEF(iemOp_daa)
{
    IEMOP_MNEMONIC("daa AL");
    IEMOP_HLP_NO_64BIT();    /* DAA is invalid in 64-bit mode. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);    /* OF is undefined after DAA. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_daa);
}
7561
7562
7563/** Opcode 0x28. */
7564FNIEMOP_DEF(iemOp_sub_Eb_Gb)
7565{
7566 IEMOP_MNEMONIC("sub Eb,Gb");
7567 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sub);
7568}
7569
7570
7571/** Opcode 0x29. */
7572FNIEMOP_DEF(iemOp_sub_Ev_Gv)
7573{
7574 IEMOP_MNEMONIC("sub Ev,Gv");
7575 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sub);
7576}
7577
7578
7579/** Opcode 0x2a. */
7580FNIEMOP_DEF(iemOp_sub_Gb_Eb)
7581{
7582 IEMOP_MNEMONIC("sub Gb,Eb");
7583 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sub);
7584}
7585
7586
7587/** Opcode 0x2b. */
7588FNIEMOP_DEF(iemOp_sub_Gv_Ev)
7589{
7590 IEMOP_MNEMONIC("sub Gv,Ev");
7591 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sub);
7592}
7593
7594
7595/** Opcode 0x2c. */
7596FNIEMOP_DEF(iemOp_sub_Al_Ib)
7597{
7598 IEMOP_MNEMONIC("sub al,Ib");
7599 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sub);
7600}
7601
7602
7603/** Opcode 0x2d. */
7604FNIEMOP_DEF(iemOp_sub_eAX_Iz)
7605{
7606 IEMOP_MNEMONIC("sub rAX,Iz");
7607 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sub);
7608}
7609
7610
7611/** Opcode 0x2e. */
7612FNIEMOP_DEF(iemOp_seg_CS)
7613{
7614 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg cs");
7615 pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_CS;
7616 pIemCpu->iEffSeg = X86_SREG_CS;
7617
7618 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
7619 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
7620}
7621
7622
7623/** Opcode 0x2f. */
7624FNIEMOP_DEF(iemOp_das)
7625{
7626 IEMOP_MNEMONIC("das AL");
7627 IEMOP_HLP_NO_64BIT();
7628 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7629 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
7630 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_das);
7631}
7632
7633
7634/** Opcode 0x30. */
7635FNIEMOP_DEF(iemOp_xor_Eb_Gb)
7636{
7637 IEMOP_MNEMONIC("xor Eb,Gb");
7638 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7639 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_xor);
7640}
7641
7642
7643/** Opcode 0x31. */
7644FNIEMOP_DEF(iemOp_xor_Ev_Gv)
7645{
7646 IEMOP_MNEMONIC("xor Ev,Gv");
7647 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7648 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_xor);
7649}
7650
7651
7652/** Opcode 0x32. */
7653FNIEMOP_DEF(iemOp_xor_Gb_Eb)
7654{
7655 IEMOP_MNEMONIC("xor Gb,Eb");
7656 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7657 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_xor);
7658}
7659
7660
7661/** Opcode 0x33. */
7662FNIEMOP_DEF(iemOp_xor_Gv_Ev)
7663{
7664 IEMOP_MNEMONIC("xor Gv,Ev");
7665 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7666 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_xor);
7667}
7668
7669
7670/** Opcode 0x34. */
7671FNIEMOP_DEF(iemOp_xor_Al_Ib)
7672{
7673 IEMOP_MNEMONIC("xor al,Ib");
7674 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7675 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_xor);
7676}
7677
7678
7679/** Opcode 0x35. */
7680FNIEMOP_DEF(iemOp_xor_eAX_Iz)
7681{
7682 IEMOP_MNEMONIC("xor rAX,Iz");
7683 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7684 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_xor);
7685}
7686
7687
7688/** Opcode 0x36. */
7689FNIEMOP_DEF(iemOp_seg_SS)
7690{
7691 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ss");
7692 pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_SS;
7693 pIemCpu->iEffSeg = X86_SREG_SS;
7694
7695 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
7696 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
7697}
7698
7699
7700/** Opcode 0x37. */
7701FNIEMOP_STUB(iemOp_aaa);
7702
7703
7704/** Opcode 0x38. */
7705FNIEMOP_DEF(iemOp_cmp_Eb_Gb)
7706{
7707 IEMOP_MNEMONIC("cmp Eb,Gb");
7708 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo do we have to decode the whole instruction first? */
7709 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_cmp);
7710}
7711
7712
7713/** Opcode 0x39. */
7714FNIEMOP_DEF(iemOp_cmp_Ev_Gv)
7715{
7716 IEMOP_MNEMONIC("cmp Ev,Gv");
7717 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo do we have to decode the whole instruction first? */
7718 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_cmp);
7719}
7720
7721
7722/** Opcode 0x3a. */
7723FNIEMOP_DEF(iemOp_cmp_Gb_Eb)
7724{
7725 IEMOP_MNEMONIC("cmp Gb,Eb");
7726 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_cmp);
7727}
7728
7729
7730/** Opcode 0x3b. */
7731FNIEMOP_DEF(iemOp_cmp_Gv_Ev)
7732{
7733 IEMOP_MNEMONIC("cmp Gv,Ev");
7734 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_cmp);
7735}
7736
7737
7738/** Opcode 0x3c. */
7739FNIEMOP_DEF(iemOp_cmp_Al_Ib)
7740{
7741 IEMOP_MNEMONIC("cmp al,Ib");
7742 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_cmp);
7743}
7744
7745
7746/** Opcode 0x3d. */
7747FNIEMOP_DEF(iemOp_cmp_eAX_Iz)
7748{
7749 IEMOP_MNEMONIC("cmp rAX,Iz");
7750 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_cmp);
7751}
7752
7753
7754/** Opcode 0x3e. */
7755FNIEMOP_DEF(iemOp_seg_DS)
7756{
7757 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ds");
7758 pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_DS;
7759 pIemCpu->iEffSeg = X86_SREG_DS;
7760
7761 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
7762 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
7763}
7764
7765
7766/** Opcode 0x3f. */
7767FNIEMOP_STUB(iemOp_aas);
7768
/**
 * Common 'inc/dec/not/neg register' helper.
 *
 * Emits the microcode for a single-operand unary instruction operating
 * directly on a general purpose register, selecting the 16, 32 or 64-bit
 * assembly worker according to the current effective operand size.
 *
 * @param   pImpl   Pointer to the unary operator implementation table
 *                  (16/32/64-bit workers).
 * @param   iReg    The general purpose register index (X86_GREG_XXX).
 */
FNIEMOP_DEF_2(iemOpCommonUnaryGReg, PCIEMOPUNARYSIZES, pImpl, uint8_t, iReg)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint16_t *, pu16Dst, 0);
            IEM_MC_ARG(uint32_t *, pEFlags, 1);
            IEM_MC_REF_GREG_U16(pu16Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
            IEM_MC_ARG(uint32_t *, pEFlags, 1);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
            /* 32-bit GPR writes clear the high dword of the 64-bit register. */
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint64_t *, pu64Dst, 0);
            IEM_MC_ARG(uint32_t *, pEFlags, 1);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
    }
    /* Not reached: enmEffOpSize only takes the three values handled above. */
    return VINF_SUCCESS;
}
7813
7814
/** Opcode 0x40. */
FNIEMOP_DEF(iemOp_inc_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        /* Bare REX (no R/X/B/W bits): record it and continue decoding. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode 0x40 is INC eAX. */
    IEMOP_MNEMONIC("inc eAX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xAX);
}
7833
7834
7835/** Opcode 0x41. */
7836FNIEMOP_DEF(iemOp_inc_eCX)
7837{
7838 /*
7839 * This is a REX prefix in 64-bit mode.
7840 */
7841 if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
7842 {
7843 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.b");
7844 pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B;
7845 pIemCpu->uRexB = 1 << 3;
7846
7847 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
7848 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
7849 }
7850
7851 IEMOP_MNEMONIC("inc eCX");
7852 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xCX);
7853}
7854
7855
7856/** Opcode 0x42. */
7857FNIEMOP_DEF(iemOp_inc_eDX)
7858{
7859 /*
7860 * This is a REX prefix in 64-bit mode.
7861 */
7862 if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
7863 {
7864 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.x");
7865 pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X;
7866 pIemCpu->uRexIndex = 1 << 3;
7867
7868 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
7869 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
7870 }
7871
7872 IEMOP_MNEMONIC("inc eDX");
7873 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDX);
7874}
7875
7876
7877
7878/** Opcode 0x43. */
7879FNIEMOP_DEF(iemOp_inc_eBX)
7880{
7881 /*
7882 * This is a REX prefix in 64-bit mode.
7883 */
7884 if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
7885 {
7886 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bx");
7887 pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
7888 pIemCpu->uRexB = 1 << 3;
7889 pIemCpu->uRexIndex = 1 << 3;
7890
7891 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
7892 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
7893 }
7894
7895 IEMOP_MNEMONIC("inc eBX");
7896 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBX);
7897}
7898
7899
/** Opcode 0x44 - REX.R prefix in 64-bit mode, otherwise INC eSP. */
FNIEMOP_DEF(iemOp_inc_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.r");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R;
        pIemCpu->uRexReg = 1 << 3; /* REX.R supplies bit 3 of the ModR/M reg field. */

        /* The prefix applies to the next opcode byte; decode and dispatch it. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eSP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSP);
}


/** Opcode 0x45 - REX.RB prefix in 64-bit mode, otherwise INC eBP. */
FNIEMOP_DEF(iemOp_inc_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rb");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B;
        pIemCpu->uRexReg = 1 << 3; /* REX.R: bit 3 of the ModR/M reg field. */
        pIemCpu->uRexB = 1 << 3; /* REX.B: bit 3 of the r/m, SIB base or opcode register field. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eBP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBP);
}


/** Opcode 0x46 - REX.RX prefix in 64-bit mode, otherwise INC eSI. */
FNIEMOP_DEF(iemOp_inc_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rx");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X;
        pIemCpu->uRexReg = 1 << 3; /* REX.R: bit 3 of the ModR/M reg field. */
        pIemCpu->uRexIndex = 1 << 3; /* REX.X: bit 3 of the SIB index field. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eSI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSI);
}


/** Opcode 0x47 - REX.RBX prefix in 64-bit mode, otherwise INC eDI. */
FNIEMOP_DEF(iemOp_inc_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbx");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pIemCpu->uRexReg = 1 << 3; /* REX.R */
        pIemCpu->uRexB = 1 << 3; /* REX.B */
        pIemCpu->uRexIndex = 1 << 3; /* REX.X */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eDI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDI);
}
7986
7987
/** Opcode 0x48 - REX.W prefix in 64-bit mode, otherwise DEC eAX. */
FNIEMOP_DEF(iemOp_dec_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.w");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_SIZE_REX_W;
        iemRecalEffOpSize(pIemCpu); /* REX.W changes the effective operand size. */

        /* The prefix applies to the next opcode byte; decode and dispatch it. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eAX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xAX);
}


/** Opcode 0x49 - REX.BW prefix in 64-bit mode, otherwise DEC eCX. */
FNIEMOP_DEF(iemOp_dec_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bw");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexB = 1 << 3; /* REX.B: bit 3 of the r/m, SIB base or opcode register field. */
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eCX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xCX);
}


/** Opcode 0x4a - REX.XW prefix in 64-bit mode, otherwise DEC eDX. */
FNIEMOP_DEF(iemOp_dec_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.xw");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexIndex = 1 << 3; /* REX.X: bit 3 of the SIB index field. */
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eDX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDX);
}


/** Opcode 0x4b - REX.BXW prefix in 64-bit mode, otherwise DEC eBX. */
FNIEMOP_DEF(iemOp_dec_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bxw");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexB = 1 << 3; /* REX.B */
        pIemCpu->uRexIndex = 1 << 3; /* REX.X */
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eBX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBX);
}


/** Opcode 0x4c - REX.RW prefix in 64-bit mode, otherwise DEC eSP. */
FNIEMOP_DEF(iemOp_dec_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rw");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexReg = 1 << 3; /* REX.R: bit 3 of the ModR/M reg field. */
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eSP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSP);
}


/** Opcode 0x4d - REX.RBW prefix in 64-bit mode, otherwise DEC eBP. */
FNIEMOP_DEF(iemOp_dec_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbw");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexReg = 1 << 3; /* REX.R */
        pIemCpu->uRexB = 1 << 3; /* REX.B */
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eBP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBP);
}


/** Opcode 0x4e - REX.RXW prefix in 64-bit mode, otherwise DEC eSI. */
FNIEMOP_DEF(iemOp_dec_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rxw");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexReg = 1 << 3; /* REX.R */
        pIemCpu->uRexIndex = 1 << 3; /* REX.X */
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eSI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSI);
}


/** Opcode 0x4f - REX.RBXW prefix in 64-bit mode, otherwise DEC eDI. */
FNIEMOP_DEF(iemOp_dec_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbxw");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexReg = 1 << 3; /* REX.R */
        pIemCpu->uRexB = 1 << 3; /* REX.B */
        pIemCpu->uRexIndex = 1 << 3; /* REX.X */
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eDI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDI);
}
8166
8167
/**
 * Common 'push register' helper.
 *
 * Pushes the given general register according to the effective operand size.
 *
 * @param   iReg    The general register index (X86_GREG_XXX); extended with
 *                  REX.B in 64-bit mode.
 */
FNIEMOP_DEF_1(iemOpCommonPushGReg, uint8_t, iReg)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        iReg |= pIemCpu->uRexB;
        /* In 64-bit mode the default operand size is 64-bit and the operand
           size prefix selects 16-bit; there is no 32-bit push. */
        pIemCpu->enmDefOpSize = IEMMODE_64BIT;
        pIemCpu->enmEffOpSize = !(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Value);
            IEM_MC_FETCH_GREG_U16(u16Value, iReg);
            IEM_MC_PUSH_U16(u16Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U32(u32Value, iReg);
            IEM_MC_PUSH_U32(u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U64(u64Value, iReg);
            IEM_MC_PUSH_U64(u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
8213
8214
/** Opcode 0x50 - PUSH rAX (with REX.B: r8). */
FNIEMOP_DEF(iemOp_push_eAX)
{
    IEMOP_MNEMONIC("push rAX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xAX);
}


/** Opcode 0x51 - PUSH rCX (with REX.B: r9). */
FNIEMOP_DEF(iemOp_push_eCX)
{
    IEMOP_MNEMONIC("push rCX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xCX);
}


/** Opcode 0x52 - PUSH rDX (with REX.B: r10). */
FNIEMOP_DEF(iemOp_push_eDX)
{
    IEMOP_MNEMONIC("push rDX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDX);
}


/** Opcode 0x53 - PUSH rBX (with REX.B: r11). */
FNIEMOP_DEF(iemOp_push_eBX)
{
    IEMOP_MNEMONIC("push rBX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBX);
}
8245
8246
/** Opcode 0x54 - PUSH rSP (with REX.B: r12).
 * @note The 8086 pushes the already decremented SP value (SP-2), unlike
 *       80186 and later CPUs which push the original SP value. */
FNIEMOP_DEF(iemOp_push_eSP)
{
    IEMOP_MNEMONIC("push rSP");
    if (IEM_GET_TARGET_CPU(pIemCpu) == IEMTARGETCPU_8086)
    {
        /* NOTE(review): this path is assumed to return from inside the
           IEM_MC block (via IEM_MC_ADVANCE_RIP/IEM_MC_END expansions) so the
           common call below isn't also executed on 8086 — confirm against the
           IEM_MC macro definitions. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint16_t, u16Value);
        IEM_MC_FETCH_GREG_U16(u16Value, X86_GREG_xSP);
        IEM_MC_SUB_LOCAL_U16(u16Value, 2); /* 8086 quirk: push SP-2. */
        IEM_MC_PUSH_U16(u16Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSP);
}
8263
8264
/** Opcode 0x55 - PUSH rBP (with REX.B: r13). */
FNIEMOP_DEF(iemOp_push_eBP)
{
    IEMOP_MNEMONIC("push rBP");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBP);
}


/** Opcode 0x56 - PUSH rSI (with REX.B: r14). */
FNIEMOP_DEF(iemOp_push_eSI)
{
    IEMOP_MNEMONIC("push rSI");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSI);
}


/** Opcode 0x57 - PUSH rDI (with REX.B: r15). */
FNIEMOP_DEF(iemOp_push_eDI)
{
    IEMOP_MNEMONIC("push rDI");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDI);
}
8287
8288
/**
 * Common 'pop register' helper.
 *
 * Pops into the given general register according to the effective operand
 * size.
 *
 * @param   iReg    The general register index (X86_GREG_XXX); extended with
 *                  REX.B in 64-bit mode.
 */
FNIEMOP_DEF_1(iemOpCommonPopGReg, uint8_t, iReg)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        iReg |= pIemCpu->uRexB;
        /* In 64-bit mode the default operand size is 64-bit and the operand
           size prefix selects 16-bit; there is no 32-bit pop. */
        pIemCpu->enmDefOpSize = IEMMODE_64BIT;
        pIemCpu->enmEffOpSize = !(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t *, pu16Dst);
            IEM_MC_REF_GREG_U16(pu16Dst, iReg);
            IEM_MC_POP_U16(pu16Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t *, pu32Dst);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            IEM_MC_POP_U32(pu32Dst);
            /* 32-bit writes zero the high half of the 64-bit register. */
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /** @todo testcase*/
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t *, pu64Dst);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_POP_U64(pu64Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
8335
8336
/** Opcode 0x58 - POP rAX (with REX.B: r8). */
FNIEMOP_DEF(iemOp_pop_eAX)
{
    IEMOP_MNEMONIC("pop rAX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xAX);
}


/** Opcode 0x59 - POP rCX (with REX.B: r9). */
FNIEMOP_DEF(iemOp_pop_eCX)
{
    IEMOP_MNEMONIC("pop rCX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xCX);
}


/** Opcode 0x5a - POP rDX (with REX.B: r10). */
FNIEMOP_DEF(iemOp_pop_eDX)
{
    IEMOP_MNEMONIC("pop rDX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDX);
}


/** Opcode 0x5b - POP rBX (with REX.B: r11). */
FNIEMOP_DEF(iemOp_pop_eBX)
{
    IEMOP_MNEMONIC("pop rBX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBX);
}
8367
8368
/** Opcode 0x5c - POP rSP (with REX.B: r12).
 *
 * POP SP needs special handling because the common pop helper dereferences
 * the destination register after adjusting RSP; here the value is popped
 * into a local first and stored afterwards. */
FNIEMOP_DEF(iemOp_pop_eSP)
{
    IEMOP_MNEMONIC("pop rSP");
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        /* With REX.B this is POP r12, which the common helper handles fine. */
        if (pIemCpu->uRexB)
            return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSP);
        pIemCpu->enmDefOpSize = IEMMODE_64BIT;
        pIemCpu->enmEffOpSize = !(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
                           DISOPTYPE_HARMLESS | DISOPTYPE_DEFAULT_64_OP_SIZE | DISOPTYPE_REXB_EXTENDS_OPREG);
    /** @todo add testcase for this instruction. */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Dst);
            IEM_MC_POP_U16(&u16Dst); /** @todo not correct MC, fix later. */
            IEM_MC_STORE_GREG_U16(X86_GREG_xSP, u16Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Dst);
            IEM_MC_POP_U32(&u32Dst);
            IEM_MC_STORE_GREG_U32(X86_GREG_xSP, u32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Dst);
            IEM_MC_POP_U64(&u64Dst);
            IEM_MC_STORE_GREG_U64(X86_GREG_xSP, u64Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
8416
8417
/** Opcode 0x5d - POP rBP (with REX.B: r13). */
FNIEMOP_DEF(iemOp_pop_eBP)
{
    IEMOP_MNEMONIC("pop rBP");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBP);
}


/** Opcode 0x5e - POP rSI (with REX.B: r14). */
FNIEMOP_DEF(iemOp_pop_eSI)
{
    IEMOP_MNEMONIC("pop rSI");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSI);
}


/** Opcode 0x5f - POP rDI (with REX.B: r15). */
FNIEMOP_DEF(iemOp_pop_eDI)
{
    IEMOP_MNEMONIC("pop rDI");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDI);
}
8440
8441
/** Opcode 0x60 - PUSHA/PUSHAD. Requires 186+, invalid in 64-bit mode. */
FNIEMOP_DEF(iemOp_pusha)
{
    IEMOP_MNEMONIC("pusha");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_NO_64BIT();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_16);
    Assert(pIemCpu->enmEffOpSize == IEMMODE_32BIT); /* 64-bit excluded above. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_32);
}
8453
8454
/** Opcode 0x61 - POPA/POPAD. Requires 186+, invalid in 64-bit mode. */
FNIEMOP_DEF(iemOp_popa)
{
    IEMOP_MNEMONIC("popa");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_NO_64BIT();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_16);
    Assert(pIemCpu->enmEffOpSize == IEMMODE_32BIT); /* 64-bit excluded above. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_32);
}
8466
8467
/** Opcode 0x62 - BOUND Gv,Ma (legacy modes) / EVEX prefix (64-bit).
 *  Not implemented yet (stub raises the appropriate not-implemented status). */
FNIEMOP_STUB(iemOp_bound_Gv_Ma_evex);
// IEMOP_HLP_MIN_186();
8471
8472
/** Opcode 0x63 - non-64-bit modes: ARPL Ew,Gw.
 *
 * Adjusts the RPL field of the destination selector word; requires 286+ and
 * protected mode (invalid in real and V86 mode). In 64-bit mode opcode 0x63
 * is MOVSXD instead (see iemOp_movsxd_Gv_Ev). */
FNIEMOP_DEF(iemOp_arpl_Ew_Gw)
{
    IEMOP_MNEMONIC("arpl Ew,Gw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register */
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint16_t *, pu16Dst, 0);
        IEM_MC_ARG(uint16_t, u16Src, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        /* No REX in the modes where ARPL is valid, so no uRexReg/uRexB. */
        IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* Memory */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *, pu16Dst, 0);
        IEM_MC_ARG(uint16_t, u16Src, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;

}
8522
8523
/** Opcode 0x63 - 64-bit mode: MOVSXD Gv,Ev (sign-extend 32-bit to 64-bit).
 * @note This is a weird one. It works like a regular move instruction if
 *       REX.W isn't set, at least according to AMD docs (rev 3.15, 2009-11).
 * @todo This definitely needs a testcase to verify the odd cases. */
FNIEMOP_DEF(iemOp_movsxd_Gv_Ev)
{
    Assert(pIemCpu->enmEffOpSize == IEMMODE_64BIT); /* Caller branched already . */

    IEMOP_MNEMONIC("movsxd Gv,Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register to register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, u64Value);
        IEM_MC_FETCH_GREG_U32_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, u64Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); /* (name notwithstanding, this is the source address) */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U32_SX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
8565
8566
/** Opcode 0x64 - FS segment override prefix. Requires 386+. */
FNIEMOP_DEF(iemOp_seg_FS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg fs");
    IEMOP_HLP_MIN_386();

    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_FS;
    pIemCpu->iEffSeg = X86_SREG_FS;

    /* Decode and dispatch the instruction the prefix applies to. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}


/** Opcode 0x65 - GS segment override prefix. Requires 386+. */
FNIEMOP_DEF(iemOp_seg_GS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg gs");
    IEMOP_HLP_MIN_386();

    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_GS;
    pIemCpu->iEffSeg = X86_SREG_GS;

    /* Decode and dispatch the instruction the prefix applies to. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
8593
8594
/** Opcode 0x66 - operand size prefix. Requires 386+. */
FNIEMOP_DEF(iemOp_op_size)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("op size");
    IEMOP_HLP_MIN_386();

    pIemCpu->fPrefixes |= IEM_OP_PRF_SIZE_OP;
    iemRecalEffOpSize(pIemCpu); /* Recalculate, since REX.W may also be in play. */

    /* Decode and dispatch the instruction the prefix applies to. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
8607
8608
/** Opcode 0x67 - address size prefix. Requires 386+.
 * Toggles the effective address mode: 16 <-> 32 bit, or 64 -> 32 bit. */
FNIEMOP_DEF(iemOp_addr_size)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("addr size");
    IEMOP_HLP_MIN_386();

    pIemCpu->fPrefixes |= IEM_OP_PRF_SIZE_ADDR;
    switch (pIemCpu->enmDefAddrMode)
    {
        case IEMMODE_16BIT: pIemCpu->enmEffAddrMode = IEMMODE_32BIT; break;
        case IEMMODE_32BIT: pIemCpu->enmEffAddrMode = IEMMODE_16BIT; break;
        case IEMMODE_64BIT: pIemCpu->enmEffAddrMode = IEMMODE_32BIT; break;
        default: AssertFailed();
    }

    /* Decode and dispatch the instruction the prefix applies to. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
8627
8628
/** Opcode 0x68 - PUSH Iz (word/dword immediate). Requires 186+.
 * In 64-bit mode the immediate is a sign-extended 32-bit value. */
FNIEMOP_DEF(iemOp_push_Iz)
{
    IEMOP_MNEMONIC("push Iz");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U16(u16Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U32(u32Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            /* 32-bit immediate sign-extended to 64 bits. */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U64(u64Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* default: for the enum values above. */
    }
}
8673
8674
/** Opcode 0x69 - IMUL Gv,Ev,Iz (three-operand form). Requires 186+.
 * SF, ZF, AF and PF are left undefined by the hardware (verification mode
 * ignores them below). */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Iz)
{
    IEMOP_MNEMONIC("imul Gv,Ev,Iz"); /* Gv = Ev * Iz; */
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);

                /* Multiply into a local and store to Gv afterwards. */
                IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* 2 = number of immediate bytes still to come after the
                   ModR/M bytes (assumed relevant for RIP-relative
                   addressing — confirm against IEM_MC_CALC_RM_EFF_ADDR). */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);

                IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* 4 immediate bytes follow. */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                /* 32-bit immediate sign-extended to 64 bits. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);

                IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* 4 immediate bytes follow. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }
    }
    AssertFailedReturn(VERR_IEM_IPE_9); /* unreachable: all operand sizes handled above. */
}
8834
8835
/** Opcode 0x6a - PUSH Ib (sign-extended byte immediate). Requires 186+. */
FNIEMOP_DEF(iemOp_push_Ib)
{
    IEMOP_MNEMONIC("push Ib");
    IEMOP_HLP_MIN_186();
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0,0);
    switch (pIemCpu->enmEffOpSize)
    {
        /* The int8_t immediate is sign-extended to the push width by the
           implicit conversion in each IEM_MC_PUSH_UXX invocation. */
        case IEMMODE_16BIT:
            IEM_MC_PUSH_U16(i8Imm);
            break;
        case IEMMODE_32BIT:
            IEM_MC_PUSH_U32(i8Imm);
            break;
        case IEMMODE_64BIT:
            IEM_MC_PUSH_U64(i8Imm);
            break;
    }
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8862
8863
/** Opcode 0x6b - IMUL Gv,Ev,Ib (three-operand form, sign-extended byte
 * immediate). Requires 186+. SF, ZF, AF and PF are left undefined. */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Ib)
{
    IEMOP_MNEMONIC("imul Gv,Ev,Ib"); /* Gv = Ev * Ib; */
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ (int8_t)u8Imm, 1); /* sign-extend Ib */
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);

                /* Multiply into a local and store to Gv afterwards. */
                IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 immediate byte follows. */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_S8_SX_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ (int8_t)u8Imm, 1); /* sign-extend Ib */
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);

                IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 immediate byte follows. */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_S8_SX_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ (int8_t)u8Imm, 1); /* sign-extend Ib */
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);

                IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 immediate byte follows. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S8_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
    }
    AssertFailedReturn(VERR_IEM_IPE_8); /* unreachable: all operand sizes handled above. */
}
9017
9018
/** Opcode 0x6c - INSB Yb,DX. Requires 186+.
 * Defers to a C implementation selected by the effective address mode; both
 * REP and REPNE prefixes select the repeating variant. */
FNIEMOP_DEF(iemOp_insb_Yb_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep ins Yb,DX");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr16, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr32, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC("ins Yb,DX");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr16, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr32, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9047
9048
/** Opcode 0x6d - INS/INSW/INSD: input word/dword string from port DX. */
FNIEMOP_DEF(iemOp_inswd_Yv_DX)
{
    IEMOP_HLP_MIN_186();        /* INS first appeared on the 80186. */
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        /* REP/REPNZ prefixed: dispatch on operand size, then address size. */
        IEMOP_MNEMONIC("rep ins Yv,DX");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* 64-bit operand size shares the 32-bit worker. */
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* all inner cases return; breaks are unreachable */
        }
    }
    else
    {
        /* Unprefixed: single iteration. */
        IEMOP_MNEMONIC("ins Yv,DX");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* 64-bit operand size shares the 32-bit worker. */
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* all inner cases return; breaks are unreachable */
        }
    }
}
9109
9110
/** Opcode 0x6e - OUTS/OUTSB: output byte string to port DX. */
FNIEMOP_DEF(iemOp_outsb_Yb_DX)
{
    IEMOP_HLP_MIN_186();        /* OUTS first appeared on the 80186. */
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        /* REP/REPNZ prefixed: defer to the string C worker for the effective
           address size.  Unlike INS, the effective segment is forwarded since
           OUTS reads from DS:rSI (segment-overridable). */
        IEMOP_MNEMONIC("rep outs DX,Yb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr16, pIemCpu->iEffSeg, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr32, pIemCpu->iEffSeg, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr64, pIemCpu->iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Unprefixed: single iteration. */
        IEMOP_MNEMONIC("outs DX,Yb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr16, pIemCpu->iEffSeg, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr32, pIemCpu->iEffSeg, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr64, pIemCpu->iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9139
9140
/** Opcode 0x6f - OUTS/OUTSW/OUTSD: output word/dword string to port DX. */
FNIEMOP_DEF(iemOp_outswd_Yv_DX)
{
    IEMOP_HLP_MIN_186();        /* OUTS first appeared on the 80186. */
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        /* REP/REPNZ prefixed: dispatch on operand size, then address size.
           The effective segment is forwarded (OUTS source is overridable). */
        IEMOP_MNEMONIC("rep outs DX,Yv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr16, pIemCpu->iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr32, pIemCpu->iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr64, pIemCpu->iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* 64-bit operand size shares the 32-bit worker. */
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr16, pIemCpu->iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr32, pIemCpu->iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr64, pIemCpu->iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* all inner cases return; breaks are unreachable */
        }
    }
    else
    {
        /* Unprefixed: single iteration. */
        IEMOP_MNEMONIC("outs DX,Yv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr16, pIemCpu->iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr32, pIemCpu->iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr64, pIemCpu->iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* 64-bit operand size shares the 32-bit worker. */
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr16, pIemCpu->iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr32, pIemCpu->iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr64, pIemCpu->iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* all inner cases return; breaks are unreachable */
        }
    }
}
9201
9202
/** Opcode 0x70 - JO Jb: jump short if overflow (OF=1). */
FNIEMOP_DEF(iemOp_jo_Jb)
{
    IEMOP_MNEMONIC("jo Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* near branches default to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);       /* taken: OF set */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();           /* not taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9220
9221
/** Opcode 0x71 - JNO Jb: jump short if not overflow (OF=0). */
FNIEMOP_DEF(iemOp_jno_Jb)
{
    IEMOP_MNEMONIC("jno Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();           /* OF set: not taken (inverted condition) */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);       /* OF clear: taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9239
/** Opcode 0x72 - JC/JB/JNAE Jb: jump short if carry (CF=1). */
FNIEMOP_DEF(iemOp_jc_Jb)
{
    IEMOP_MNEMONIC("jc/jnae Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_REL_JMP_S8(i8Imm);       /* taken: CF set */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();           /* not taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9257
9258
/** Opcode 0x73 - JNC/JNB/JAE Jb: jump short if not carry (CF=0). */
FNIEMOP_DEF(iemOp_jnc_Jb)
{
    IEMOP_MNEMONIC("jnc/jnb Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_ADVANCE_RIP();           /* CF set: not taken (inverted condition) */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);       /* CF clear: taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9276
9277
/** Opcode 0x74 - JE/JZ Jb: jump short if equal/zero (ZF=1). */
FNIEMOP_DEF(iemOp_je_Jb)
{
    IEMOP_MNEMONIC("je/jz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8(i8Imm);       /* taken: ZF set */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();           /* not taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9295
9296
/** Opcode 0x75 - JNE/JNZ Jb: jump short if not equal/not zero (ZF=0). */
FNIEMOP_DEF(iemOp_jne_Jb)
{
    IEMOP_MNEMONIC("jne/jnz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP();           /* ZF set: not taken (inverted condition) */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);       /* ZF clear: taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9314
9315
/** Opcode 0x76 - JBE/JNA Jb: jump short if below or equal (CF=1 or ZF=1). */
FNIEMOP_DEF(iemOp_jbe_Jb)
{
    IEMOP_MNEMONIC("jbe/jna Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8(i8Imm);       /* taken: CF or ZF set */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();           /* not taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9333
9334
/** Opcode 0x77 - JNBE/JA Jb: jump short if above (CF=0 and ZF=0). */
FNIEMOP_DEF(iemOp_jnbe_Jb)
{
    IEMOP_MNEMONIC("jnbe/ja Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP();           /* CF or ZF set: not taken (inverted condition) */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);       /* both clear: taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9352
9353
/** Opcode 0x78 - JS Jb: jump short if sign (SF=1). */
FNIEMOP_DEF(iemOp_js_Jb)
{
    IEMOP_MNEMONIC("js Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_REL_JMP_S8(i8Imm);       /* taken: SF set */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();           /* not taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9371
9372
/** Opcode 0x79 - JNS Jb: jump short if not sign (SF=0). */
FNIEMOP_DEF(iemOp_jns_Jb)
{
    IEMOP_MNEMONIC("jns Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_ADVANCE_RIP();           /* SF set: not taken (inverted condition) */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);       /* SF clear: taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9390
9391
/** Opcode 0x7a - JP/JPE Jb: jump short if parity (PF=1). */
FNIEMOP_DEF(iemOp_jp_Jb)
{
    IEMOP_MNEMONIC("jp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_REL_JMP_S8(i8Imm);       /* taken: PF set */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();           /* not taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9409
9410
/** Opcode 0x7b - JNP/JPO Jb: jump short if not parity (PF=0). */
FNIEMOP_DEF(iemOp_jnp_Jb)
{
    IEMOP_MNEMONIC("jnp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_ADVANCE_RIP();           /* PF set: not taken (inverted condition) */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);       /* PF clear: taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9428
9429
/** Opcode 0x7c - JL/JNGE Jb: jump short if less (SF != OF). */
FNIEMOP_DEF(iemOp_jl_Jb)
{
    IEMOP_MNEMONIC("jl/jnge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);       /* taken: SF differs from OF */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();           /* not taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9447
9448
/** Opcode 0x7d - JNL/JGE Jb: jump short if not less (SF == OF). */
FNIEMOP_DEF(iemOp_jnl_Jb)
{
    IEMOP_MNEMONIC("jnl/jge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();           /* SF != OF: not taken (inverted condition) */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);       /* SF == OF: taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9466
9467
/** Opcode 0x7e - JLE/JNG Jb: jump short if less or equal (ZF=1 or SF != OF). */
FNIEMOP_DEF(iemOp_jle_Jb)
{
    IEMOP_MNEMONIC("jle/jng Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);       /* taken: ZF set, or SF differs from OF */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();           /* not taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9485
9486
/** Opcode 0x7f - JNLE/JG Jb: jump short if greater (ZF=0 and SF == OF). */
FNIEMOP_DEF(iemOp_jnle_Jb)
{
    IEMOP_MNEMONIC("jnle/jg Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();           /* ZF set or SF != OF: not taken (inverted condition) */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);       /* ZF clear and SF == OF: taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9504
9505
/**
 * Opcode 0x80 - Group 1: ADD/OR/ADC/SBB/AND/SUB/XOR/CMP Eb,Ib.
 * The ModR/M reg field selects the operation via g_apIemImplGrp1.
 */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_80)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* The mnemonic is picked by indexing reg*4 into a string of 4-byte padded names. */
    IEMOP_MNEMONIC2("add\0or\0\0adc\0sbb\0and\0sub\0xor\0cmp" + ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)*4, "Eb,Ib");
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();     /* LOCK is only valid with a memory destination */
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        uint32_t fAccess;
        if (pImpl->pfnLockedU8)
            fAccess = IEM_ACCESS_DATA_RW;
        else
        { /* CMP - read-only destination, so a LOCK prefix is invalid here. */
            IEMOP_HLP_NO_LOCK_PREFIX();
            fAccess = IEM_ACCESS_DATA_R;
        }
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        /* NOTE(review): the trailing 1 appears to be the number of immediate
           bytes still to come after the ModR/M bytes - confirm against the
           IEM_MC_CALC_RM_EFF_ADDR definition. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);

        IEM_MC_MEM_MAP(pu8Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
9564
9565
/**
 * Opcode 0x81 - Group 1: ADD/OR/ADC/SBB/AND/SUB/XOR/CMP Ev,Iz.
 * The ModR/M reg field selects the operation; the immediate is operand sized
 * (imm16/imm32, with imm32 sign-extended for 64-bit operand size).
 */
FNIEMOP_DEF(iemOp_Grp1_Ev_Iz)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* The mnemonic is picked by indexing reg*4 into a string of 4-byte padded names. */
    IEMOP_MNEMONIC2("add\0or\0\0adc\0sbb\0and\0sub\0xor\0cmp" + ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)*4, "Ev,Iz");
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register target */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_NO_LOCK_PREFIX();     /* LOCK only valid with a memory destination */
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u16Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                uint32_t fAccess;
                if (pImpl->pfnLockedU16)
                    fAccess = IEM_ACCESS_DATA_RW;
                else
                { /* CMP, TEST - read-only destination, LOCK invalid. */
                    IEMOP_HLP_NO_LOCK_PREFIX();
                    fAccess = IEM_ACCESS_DATA_R;
                }
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); /* 2 = size of the imm16 that follows */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_32BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register target */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u32Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /* 32-bit ops zero the upper register half */

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                uint32_t fAccess;
                if (pImpl->pfnLockedU32)
                    fAccess = IEM_ACCESS_DATA_RW;
                else
                { /* CMP, TEST - read-only destination, LOCK invalid. */
                    IEMOP_HLP_NO_LOCK_PREFIX();
                    fAccess = IEM_ACCESS_DATA_R;
                }
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* 4 = size of the imm32 that follows */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_64BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register target - imm32 sign-extended to 64 bits */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u64Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                uint32_t fAccess;
                if (pImpl->pfnLockedU64)
                    fAccess = IEM_ACCESS_DATA_RW;
                else
                { /* CMP - read-only destination, LOCK invalid. */
                    IEMOP_HLP_NO_LOCK_PREFIX();
                    fAccess = IEM_ACCESS_DATA_R;
                }
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* still imm32 (sign-extended) */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            break;
        }
    }
    return VINF_SUCCESS;
}
9740
9741
/**
 * Opcode 0x82 - alias of opcode 0x80 (Group 1 Eb,Ib), but invalid in
 * 64-bit mode; otherwise identical, so it forwards to the 0x80 decoder.
 */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_82)
{
    IEMOP_HLP_NO_64BIT(); /** @todo do we need to decode the whole instruction or is this ok? */
    return FNIEMOP_CALL(iemOp_Grp1_Eb_Ib_80);
}
9748
9749
/**
 * Opcode 0x83 - Group 1: ADD/OR/ADC/SBB/AND/SUB/XOR/CMP Ev,Ib.
 * The imm8 is sign-extended to the effective operand size.
 */
FNIEMOP_DEF(iemOp_Grp1_Ev_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* The mnemonic is picked by indexing reg*4 into a string of 4-byte padded names. */
    IEMOP_MNEMONIC2("add\0or\0\0adc\0sbb\0and\0sub\0xor\0cmp" + ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)*4, "Ev,Ib");
    /* Note! Seems the OR, AND, and XOR instructions are present on CPUs prior
       to the 386 even if absent in the intel reference manuals and some
       3rd party opcode listings. */
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register target
         */
        IEMOP_HLP_NO_LOCK_PREFIX();     /* LOCK only valid with a memory destination */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (int8_t)u8Imm,1); /* sign-extend imm8 */
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (int8_t)u8Imm,1); /* sign-extend imm8 */
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /* 32-bit ops zero the upper register half */

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int8_t)u8Imm,1); /* sign-extend imm8 */
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }
        }
    }
    else
    {
        /*
         * Memory target.
         */
        /* NOTE(review): checking only pfnLockedU16 looks intentional - the
           locked variants are presumably either all present or all NULL for
           a given table entry; verify against g_apIemImplGrp1. */
        uint32_t fAccess;
        if (pImpl->pfnLockedU16)
            fAccess = IEM_ACCESS_DATA_RW;
        else
        { /* CMP - read-only destination, LOCK invalid. */
            IEMOP_HLP_NO_LOCK_PREFIX();
            fAccess = IEM_ACCESS_DATA_R;
        }

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = size of the imm8 that follows */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm);         /* sign-extend imm8 */
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = size of the imm8 that follows */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm);         /* sign-extend imm8 */
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = size of the imm8 that follows */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm);         /* sign-extend imm8 */
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }
        }
    }
    return VINF_SUCCESS;
}
9913
9914
/**
 * Opcode 0x84 - TEST Eb,Gb: AND without storing the result; only EFLAGS are
 * updated.  Reuses the common byte r/m,reg binary-operator worker.
 */
FNIEMOP_DEF(iemOp_test_Eb_Gb)
{
    IEMOP_MNEMONIC("test Eb,Gb");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo do we have to decode the whole instruction first? */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after TEST */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_test);
}
9923
9924
/**
 * Opcode 0x85 - TEST Ev,Gv: AND without storing the result; only EFLAGS are
 * updated.  Reuses the common word/dword/qword r/m,reg binary-operator worker.
 */
FNIEMOP_DEF(iemOp_test_Ev_Gv)
{
    IEMOP_MNEMONIC("test Ev,Gv");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo do we have to decode the whole instruction first? */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after TEST */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_test);
}
9933
9934
/** Opcode 0x86 - XCHG Eb,Gb: exchange byte register/memory with byte register. */
FNIEMOP_DEF(iemOp_xchg_Eb_Gb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC("xchg Eb,Gb");

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX();

        /* Plain two-temporary register swap. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, uTmp1);
        IEM_MC_LOCAL(uint8_t, uTmp2);

        IEM_MC_FETCH_GREG_U8(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_FETCH_GREG_U8(uTmp2, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, uTmp1);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uTmp2);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.  The memory operand is mapped read-write
         * and swapped in place with the register via the assembly helper.
         */
/** @todo the register must be committed separately! */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint8_t *, pu8Mem, 0);
        IEM_MC_ARG(uint8_t *, pu8Reg, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Mem, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8, pu8Mem, pu8Reg);
        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Mem, IEM_ACCESS_DATA_RW);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
9982
9983
/** Opcode 0x87 - XCHG Ev,Gv: exchange word/dword/qword register/memory with register. */
FNIEMOP_DEF(iemOp_xchg_Ev_Gv)
{
    IEMOP_MNEMONIC("xchg Ev,Gv");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX();

        /* Plain two-temporary register swap for each operand size.  Note that
           the 32-bit stores implicitly zero the upper register halves. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, uTmp1);
                IEM_MC_LOCAL(uint16_t, uTmp2);

                IEM_MC_FETCH_GREG_U16(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U16(uTmp2, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, uTmp1);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, uTmp1);
                IEM_MC_LOCAL(uint32_t, uTmp2);

                IEM_MC_FETCH_GREG_U32(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U32(uTmp2, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, uTmp1);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, uTmp1);
                IEM_MC_LOCAL(uint64_t, uTmp2);

                IEM_MC_FETCH_GREG_U64(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U64(uTmp2, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, uTmp1);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory.  The memory operand is mapped read-write
         * and swapped in place with the register via the assembly helper.
         */
        switch (pIemCpu->enmEffOpSize)
        {
/** @todo the register must be committed separately! */
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint16_t *, pu16Mem, 0);
                IEM_MC_ARG(uint16_t *, pu16Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu16Mem, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16, pu16Mem, pu16Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Mem, IEM_ACCESS_DATA_RW);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint32_t *, pu32Mem, 0);
                IEM_MC_ARG(uint32_t *, pu32Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu32Mem, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32, pu32Mem, pu32Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Mem, IEM_ACCESS_DATA_RW);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg); /* 32-bit write zeros the upper register half */
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint64_t *, pu64Mem, 0);
                IEM_MC_ARG(uint64_t *, pu64Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu64Mem, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64, pu64Mem, pu64Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Mem, IEM_ACCESS_DATA_RW);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
10105
10106
/** Opcode 0x88 - mov Eb,Gb: store the byte register selected by the reg
 *  field into the byte register or memory operand selected by mod r/m. */
FNIEMOP_DEF(iemOp_mov_Eb_Gb)
{
    IEMOP_MNEMONIC("mov Eb,Gb");

    uint8_t bRm;
    IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register to register; REX.R / REX.B extend the source / dest ids. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_STORE_MEM_U8(pIemCpu->iEffSeg, GCPtrEffDst, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;

}
10145
10146
/** Opcode 0x89 - mov Ev,Gv: store a 16/32/64-bit general register into the
 *  register or memory operand selected by mod r/m, honoring the effective
 *  operand size. */
FNIEMOP_DEF(iemOp_mov_Ev_Gv)
{
    IEMOP_MNEMONIC("mov Ev,Gv");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                /* Note: IEM_MC_STORE_GREG_U32 also clears the high dword in
                   64-bit mode, per the usual 32-bit write semantics. */
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrEffDst, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffDst, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
10233
10234
/** Opcode 0x8a - mov Gb,Eb: load the byte register selected by the reg field
 *  from the byte register or memory operand selected by mod r/m. */
FNIEMOP_DEF(iemOp_mov_Gb_Eb)
{
    IEMOP_MNEMONIC("mov Gb,Eb");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_MEM_U8(u8Value, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
10271
10272
/** Opcode 0x8b - mov Gv,Ev: load a 16/32/64-bit general register from the
 *  register or memory operand selected by mod r/m, honoring the effective
 *  operand size. */
FNIEMOP_DEF(iemOp_mov_Gv_Ev)
{
    IEMOP_MNEMONIC("mov Gv,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
10359
10360
10361/** Opcode 0x63. */
10362FNIEMOP_DEF(iemOp_arpl_Ew_Gw_movsx_Gv_Ev)
10363{
10364 if (pIemCpu->enmCpuMode != IEMMODE_64BIT)
10365 return FNIEMOP_CALL(iemOp_arpl_Ew_Gw);
10366 if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
10367 return FNIEMOP_CALL(iemOp_mov_Gv_Ev);
10368 return FNIEMOP_CALL(iemOp_movsxd_Gv_Ev);
10369}
10370
10371
/** Opcode 0x8c - mov Ev,Sw: store a segment selector into a general register
 *  or a 16-bit memory operand. */
FNIEMOP_DEF(iemOp_mov_Ev_Sw)
{
    IEMOP_MNEMONIC("mov Ev,Sw");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * Check that the destination register exists. The REX.R prefix is ignored.
     * The reg field is 3 bits wide, so only values 6 and 7 can be invalid here.
     */
    uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
    if ( iSegReg > X86_SREG_GS)
        return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     * In that case, the operand size is respected and the upper bits are
     * cleared (starting with some pentium).
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_SREG_ZX_U32(u32Value, iSegReg);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_SREG_ZX_U64(u64Value, iSegReg);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're saving the register to memory. The access is word sized
         * regardless of operand size prefixes.
         */
#if 0 /* not necessary */
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_16BIT;
#endif
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
        IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
10444
10445
10446
10447
/** Opcode 0x8d - lea Gv,M: store the effective address of the memory operand
 *  in a general register, truncated to the effective operand size.  The
 *  register form is invalid. */
FNIEMOP_DEF(iemOp_lea_Gv_M)
{
    IEMOP_MNEMONIC("lea Gv,M");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE(); /* no register form */

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint16_t, u16Cast);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEM_MC_ASSIGN_TO_SMALLER(u16Cast, GCPtrEffSrc);
            IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Cast);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Cast);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEM_MC_ASSIGN_TO_SMALLER(u32Cast, GCPtrEffSrc);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Cast);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            /* No truncation needed - the effective address is stored as-is. */
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, GCPtrEffSrc);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
    }
    AssertFailedReturn(VERR_IEM_IPE_7);
}
10492
10493
/** Opcode 0x8e - mov Sw,Ev: load a segment register from a general register
 *  or a 16-bit memory operand.  Loading CS this way is invalid. */
FNIEMOP_DEF(iemOp_mov_Sw_Ev)
{
    IEMOP_MNEMONIC("mov Sw,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * The practical operand size is 16-bit.
     */
#if 0 /* not necessary */
    pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_16BIT;
#endif

    /*
     * Check that the destination register exists and can be used with this
     * instruction. The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
    if ( iSegReg == X86_SREG_CS
        || iSegReg > X86_SREG_GS)
        return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     * The actual segment load is deferred to the iemCImpl_load_SReg worker
     * which handles all the descriptor checks and mode differences.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
        IEM_MC_ARG(uint16_t, u16Value, 1);
        IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading the register from memory. The access is word sized
         * regardless of operand size prefixes.
         */
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
        IEM_MC_ARG(uint16_t, u16Value, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_MEM_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
10547
10548
/** Opcode 0x8f /0 - pop Ev: pop a value off the stack into a register or
 *  memory operand.  The memory form needs RSP incremented before the
 *  effective address calculation, hence the double decode below. */
FNIEMOP_DEF_1(iemOp_pop_Ev, uint8_t, bRm)
{
    /* This bugger is rather annoying as it requires rSP to be updated before
       doing the effective address calculations. Will eventually require a
       split between the R/M+SIB decoding and the effective address
       calculation - which is something that is required for any attempt at
       reusing this code for a recompiler. It may also be good to have if we
       need to delay #UD exception caused by invalid lock prefixes.

       For now, we'll do a mostly safe interpreter-only implementation here. */
    /** @todo What's the deal with the 'reg' field and pop Ev? Ignoring it for
     * now until tests show it's checked.. */
    IEMOP_MNEMONIC("pop Ev");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /* Register access is relatively easy and can share code. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(iemOpCommonPopGReg, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);

    /*
     * Memory target.
     *
     * Intel says that RSP is incremented before it's used in any effective
     * address calculations. This means some serious extra annoyance here since
     * we decode and calculate the effective address in one step and like to
     * delay committing registers till everything is done.
     *
     * So, we'll decode and calculate the effective address twice. This will
     * require some recoding if turned into a recompiler.
     */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* The common code does this differently. */

#ifndef TST_IEM_CHECK_MC
    /* Calc effective address with modified ESP. */
    /* The opcode read position is rewound afterwards so the second decode
       pass below sees the same modrm/SIB/displacement bytes. */
    uint8_t const offOpcodeSaved = pIemCpu->offOpcode;
    RTGCPTR GCPtrEff;
    VBOXSTRICTRC rcStrict;
    rcStrict = iemOpHlpCalcRmEffAddr(pIemCpu, bRm, 0, &GCPtrEff);
    if (rcStrict != VINF_SUCCESS)
        return rcStrict;
    pIemCpu->offOpcode = offOpcodeSaved;

    /* Temporarily bump RSP by the operand size, redo the EA calculation,
       then restore RSP so the commit below starts from the original value. */
    PCPUMCTX pCtx = pIemCpu->CTX_SUFF(pCtx);
    uint64_t const RspSaved = pCtx->rsp;
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT: iemRegAddToRsp(pIemCpu, pCtx, 2); break;
        case IEMMODE_32BIT: iemRegAddToRsp(pIemCpu, pCtx, 4); break;
        case IEMMODE_64BIT: iemRegAddToRsp(pIemCpu, pCtx, 8); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    rcStrict = iemOpHlpCalcRmEffAddr(pIemCpu, bRm, 0, &GCPtrEff);
    Assert(rcStrict == VINF_SUCCESS);
    pCtx->rsp = RspSaved;

    /* Perform the operation - this should be CImpl. */
    /* Pop via a temporary RSP copy; only committed on full success. */
    RTUINT64U TmpRsp;
    TmpRsp.u = pCtx->rsp;
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Value;
            rcStrict = iemMemStackPopU16Ex(pIemCpu, &u16Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU16(pIemCpu, pIemCpu->iEffSeg, GCPtrEff, u16Value);
            break;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Value;
            rcStrict = iemMemStackPopU32Ex(pIemCpu, &u32Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU32(pIemCpu, pIemCpu->iEffSeg, GCPtrEff, u32Value);
            break;
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Value;
            rcStrict = iemMemStackPopU64Ex(pIemCpu, &u64Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU64(pIemCpu, pIemCpu->iEffSeg, GCPtrEff, u64Value);
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict == VINF_SUCCESS)
    {
        pCtx->rsp = TmpRsp.u;
        iemRegUpdateRipAndClearRF(pIemCpu);
    }
    return rcStrict;

#else
    return VERR_IEM_IPE_2;
#endif
}
10650
10651
10652/** Opcode 0x8f. */
10653FNIEMOP_DEF(iemOp_Grp1A)
10654{
10655 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10656 if ((bRm & X86_MODRM_REG_MASK) == (0 << X86_MODRM_REG_SHIFT)) /* /0 */
10657 return FNIEMOP_CALL_1(iemOp_pop_Ev, bRm);
10658
10659 /* AMD has defined /1 thru /7 as XOP prefix (similar to three byte VEX). */
10660 /** @todo XOP decoding. */
10661 IEMOP_MNEMONIC("3-byte-xop");
10662 return IEMOP_RAISE_INVALID_OPCODE();
10663}
10664
10665
10666/**
10667 * Common 'xchg reg,rAX' helper.
10668 */
10669FNIEMOP_DEF_1(iemOpCommonXchgGRegRax, uint8_t, iReg)
10670{
10671 IEMOP_HLP_NO_LOCK_PREFIX();
10672
10673 iReg |= pIemCpu->uRexB;
10674 switch (pIemCpu->enmEffOpSize)
10675 {
10676 case IEMMODE_16BIT:
10677 IEM_MC_BEGIN(0, 2);
10678 IEM_MC_LOCAL(uint16_t, u16Tmp1);
10679 IEM_MC_LOCAL(uint16_t, u16Tmp2);
10680 IEM_MC_FETCH_GREG_U16(u16Tmp1, iReg);
10681 IEM_MC_FETCH_GREG_U16(u16Tmp2, X86_GREG_xAX);
10682 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp1);
10683 IEM_MC_STORE_GREG_U16(iReg, u16Tmp2);
10684 IEM_MC_ADVANCE_RIP();
10685 IEM_MC_END();
10686 return VINF_SUCCESS;
10687
10688 case IEMMODE_32BIT:
10689 IEM_MC_BEGIN(0, 2);
10690 IEM_MC_LOCAL(uint32_t, u32Tmp1);
10691 IEM_MC_LOCAL(uint32_t, u32Tmp2);
10692 IEM_MC_FETCH_GREG_U32(u32Tmp1, iReg);
10693 IEM_MC_FETCH_GREG_U32(u32Tmp2, X86_GREG_xAX);
10694 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp1);
10695 IEM_MC_STORE_GREG_U32(iReg, u32Tmp2);
10696 IEM_MC_ADVANCE_RIP();
10697 IEM_MC_END();
10698 return VINF_SUCCESS;
10699
10700 case IEMMODE_64BIT:
10701 IEM_MC_BEGIN(0, 2);
10702 IEM_MC_LOCAL(uint64_t, u64Tmp1);
10703 IEM_MC_LOCAL(uint64_t, u64Tmp2);
10704 IEM_MC_FETCH_GREG_U64(u64Tmp1, iReg);
10705 IEM_MC_FETCH_GREG_U64(u64Tmp2, X86_GREG_xAX);
10706 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp1);
10707 IEM_MC_STORE_GREG_U64(iReg, u64Tmp2);
10708 IEM_MC_ADVANCE_RIP();
10709 IEM_MC_END();
10710 return VINF_SUCCESS;
10711
10712 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10713 }
10714}
10715
10716
10717/** Opcode 0x90. */
10718FNIEMOP_DEF(iemOp_nop)
10719{
10720 /* R8/R8D and RAX/EAX can be exchanged. */
10721 if (pIemCpu->fPrefixes & IEM_OP_PRF_REX_B)
10722 {
10723 IEMOP_MNEMONIC("xchg r8,rAX");
10724 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xAX);
10725 }
10726
10727 if (pIemCpu->fPrefixes & IEM_OP_PRF_LOCK)
10728 IEMOP_MNEMONIC("pause");
10729 else
10730 IEMOP_MNEMONIC("nop");
10731 IEM_MC_BEGIN(0, 0);
10732 IEM_MC_ADVANCE_RIP();
10733 IEM_MC_END();
10734 return VINF_SUCCESS;
10735}
10736
10737
/** Opcode 0x91 - xchg rCX,rAX (REX.B is applied by the common worker). */
FNIEMOP_DEF(iemOp_xchg_eCX_eAX)
{
    IEMOP_MNEMONIC("xchg rCX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xCX);
}
10744
10745
/** Opcode 0x92 - xchg rDX,rAX (REX.B is applied by the common worker). */
FNIEMOP_DEF(iemOp_xchg_eDX_eAX)
{
    IEMOP_MNEMONIC("xchg rDX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDX);
}
10752
10753
/** Opcode 0x93 - xchg rBX,rAX (REX.B is applied by the common worker). */
FNIEMOP_DEF(iemOp_xchg_eBX_eAX)
{
    IEMOP_MNEMONIC("xchg rBX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBX);
}
10760
10761
10762/** Opcode 0x94. */
10763FNIEMOP_DEF(iemOp_xchg_eSP_eAX)
10764{
10765 IEMOP_MNEMONIC("xchg rSX,rAX");
10766 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSP);
10767}
10768
10769
/** Opcode 0x95 - xchg rBP,rAX (REX.B is applied by the common worker). */
FNIEMOP_DEF(iemOp_xchg_eBP_eAX)
{
    IEMOP_MNEMONIC("xchg rBP,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBP);
}
10776
10777
/** Opcode 0x96 - xchg rSI,rAX (REX.B is applied by the common worker). */
FNIEMOP_DEF(iemOp_xchg_eSI_eAX)
{
    IEMOP_MNEMONIC("xchg rSI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSI);
}
10784
10785
/** Opcode 0x97 - xchg rDI,rAX (REX.B is applied by the common worker). */
FNIEMOP_DEF(iemOp_xchg_eDI_eAX)
{
    IEMOP_MNEMONIC("xchg rDI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDI);
}
10792
10793
/** Opcode 0x98 - cbw/cwde/cdqe: sign extend the lower half of rAX into the
 *  upper half.  Implemented by testing the sign bit and then OR-ing in or
 *  AND-ing away the high mask. */
FNIEMOP_DEF(iemOp_cbw)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEMOP_MNEMONIC("cbw");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 7) {
                IEM_MC_OR_GREG_U16(X86_GREG_xAX, UINT16_C(0xff00));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U16(X86_GREG_xAX, UINT16_C(0x00ff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEMOP_MNEMONIC("cwde");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
                IEM_MC_OR_GREG_U32(X86_GREG_xAX, UINT32_C(0xffff0000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U32(X86_GREG_xAX, UINT32_C(0x0000ffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEMOP_MNEMONIC("cdqe");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
                IEM_MC_OR_GREG_U64(X86_GREG_xAX, UINT64_C(0xffffffff00000000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U64(X86_GREG_xAX, UINT64_C(0x00000000ffffffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10839
10840
/** Opcode 0x99 - cwd/cdq/cqo: sign extend rAX into rDX by filling rDX with
 *  all ones or all zeros depending on the sign bit of rAX. */
FNIEMOP_DEF(iemOp_cwd)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEMOP_MNEMONIC("cwd");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, UINT16_C(0xffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEMOP_MNEMONIC("cdq");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, UINT32_C(0xffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEMOP_MNEMONIC("cqo");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 63) {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, UINT64_C(0xffffffffffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10886
10887
/** Opcode 0x9a - call Ap: far call to an immediate seg:offset pointer.
 *  Invalid in 64-bit mode. */
FNIEMOP_DEF(iemOp_call_Ap)
{
    IEMOP_MNEMONIC("call Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    /* The offset is 16 or 32 bits wide depending on the operand size; the
       selector word always follows it. */
    uint32_t offSeg;
    if (pIemCpu->enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&offSeg);
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
    uint16_t uSel; IEM_OPCODE_GET_NEXT_U16(&uSel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_callf, uSel, offSeg, pIemCpu->enmEffOpSize);
}
10904
10905
/** Opcode 0x9b (aka fwait) - raise pending x87 exceptions (#NM/#MF checks),
 *  otherwise a no-op. */
FNIEMOP_DEF(iemOp_wait)
{
    IEMOP_MNEMONIC("wait");
    IEMOP_HLP_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10919
10920
/** Opcode 0x9c - pushf: push the flags register; deferred to the
 *  iemCImpl_pushf worker. */
FNIEMOP_DEF(iemOp_pushf_Fv)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_pushf, pIemCpu->enmEffOpSize);
}
10928
10929
/** Opcode 0x9d - popf: pop the flags register; deferred to the
 *  iemCImpl_popf worker which handles the privilege dependent bits. */
FNIEMOP_DEF(iemOp_popf_Fv)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_popf, pIemCpu->enmEffOpSize);
}
10937
10938
/** Opcode 0x9e - sahf: store AH into the low byte of EFLAGS (SF, ZF, AF, PF,
 *  CF); invalid in 64-bit mode unless the CPU reports LAHF/SAHF support. */
FNIEMOP_DEF(iemOp_sahf)
{
    IEMOP_MNEMONIC("sahf");
    IEMOP_HLP_NO_LOCK_PREFIX();
    if ( pIemCpu->enmCpuMode == IEMMODE_64BIT
        && !IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fLahfSahf)
        return IEMOP_RAISE_INVALID_OPCODE();
    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint32_t, u32Flags);
    IEM_MC_LOCAL(uint32_t, EFlags);
    IEM_MC_FETCH_EFLAGS(EFlags);
    IEM_MC_FETCH_GREG_U8_ZX_U32(u32Flags, X86_GREG_xSP/*=AH*/);
    /* Mask AH down to the five flags SAHF transfers, keep the upper EFLAGS
       bits, and force the reserved bit 1 (X86_EFL_1) to one. */
    IEM_MC_AND_LOCAL_U32(u32Flags, X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    IEM_MC_AND_LOCAL_U32(EFlags, UINT32_C(0xffffff00));
    IEM_MC_OR_LOCAL_U32(u32Flags, X86_EFL_1);
    IEM_MC_OR_2LOCS_U32(EFlags, u32Flags);
    IEM_MC_COMMIT_EFLAGS(EFlags);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10961
10962
/** Opcode 0x9f - lahf: load the low byte of EFLAGS into AH; invalid in
 *  64-bit mode unless the CPU reports LAHF/SAHF support. */
FNIEMOP_DEF(iemOp_lahf)
{
    IEMOP_MNEMONIC("lahf");
    IEMOP_HLP_NO_LOCK_PREFIX();
    if ( pIemCpu->enmCpuMode == IEMMODE_64BIT
        && !IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fLahfSahf)
        return IEMOP_RAISE_INVALID_OPCODE();
    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(uint8_t, u8Flags);
    IEM_MC_FETCH_EFLAGS_U8(u8Flags);
    IEM_MC_STORE_GREG_U8(X86_GREG_xSP/*=AH*/, u8Flags);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10979
10980
10981/**
10982 * Macro used by iemOp_mov_Al_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL and
10983 * iemOp_mov_Ov_rAX to fetch the moffsXX bit of the opcode and fend of lock
10984 * prefixes. Will return on failures.
10985 * @param a_GCPtrMemOff The variable to store the offset in.
10986 */
10987#define IEMOP_FETCH_MOFFS_XX(a_GCPtrMemOff) \
10988 do \
10989 { \
10990 switch (pIemCpu->enmEffAddrMode) \
10991 { \
10992 case IEMMODE_16BIT: \
10993 IEM_OPCODE_GET_NEXT_U16_ZX_U64(&(a_GCPtrMemOff)); \
10994 break; \
10995 case IEMMODE_32BIT: \
10996 IEM_OPCODE_GET_NEXT_U32_ZX_U64(&(a_GCPtrMemOff)); \
10997 break; \
10998 case IEMMODE_64BIT: \
10999 IEM_OPCODE_GET_NEXT_U64(&(a_GCPtrMemOff)); \
11000 break; \
11001 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
11002 } \
11003 IEMOP_HLP_NO_LOCK_PREFIX(); \
11004 } while (0)
11005
/** Opcode 0xa0 - mov AL,Ob: load AL from an absolute memory offset. */
FNIEMOP_DEF(iemOp_mov_Al_Ob)
{
    /*
     * Get the offset and fend of lock prefixes.
     */
    RTGCPTR GCPtrMemOff;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);

    /*
     * Fetch AL.
     */
    IEM_MC_BEGIN(0,1);
    IEM_MC_LOCAL(uint8_t, u8Tmp);
    IEM_MC_FETCH_MEM_U8(u8Tmp, pIemCpu->iEffSeg, GCPtrMemOff);
    IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
11026
11027
/** Opcode 0xa1 - mov rAX,Ov: load rAX/eAX/ax from an absolute memory offset
 *  according to the effective operand size. */
FNIEMOP_DEF(iemOp_mov_rAX_Ov)
{
    /*
     * Get the offset and fend of lock prefixes.
     */
    IEMOP_MNEMONIC("mov rAX,Ov");
    RTGCPTR GCPtrMemOff;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);

    /*
     * Fetch rAX.
     */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint16_t, u16Tmp);
            IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11073
11074
/** Opcode 0xa2 - mov Ob,AL: store AL at an absolute memory offset. */
FNIEMOP_DEF(iemOp_mov_Ob_AL)
{
    /*
     * Get the offset and fend of lock prefixes.
     */
    RTGCPTR GCPtrMemOff;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);

    /*
     * Store AL.
     */
    IEM_MC_BEGIN(0,1);
    IEM_MC_LOCAL(uint8_t, u8Tmp);
    IEM_MC_FETCH_GREG_U8(u8Tmp, X86_GREG_xAX);
    IEM_MC_STORE_MEM_U8(pIemCpu->iEffSeg, GCPtrMemOff, u8Tmp);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
11095
11096
11097/** Opcode 0xa3. */
11098FNIEMOP_DEF(iemOp_mov_Ov_rAX)
11099{
11100 /*
11101 * Get the offset and fend of lock prefixes.
11102 */
11103 RTGCPTR GCPtrMemOff;
11104 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
11105
11106 /*
11107 * Store rAX.
11108 */
11109 switch (pIemCpu->enmEffOpSize)
11110 {
11111 case IEMMODE_16BIT:
11112 IEM_MC_BEGIN(0,1);
11113 IEM_MC_LOCAL(uint16_t, u16Tmp);
11114 IEM_MC_FETCH_GREG_U16(u16Tmp, X86_GREG_xAX);
11115 IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrMemOff, u16Tmp);
11116 IEM_MC_ADVANCE_RIP();
11117 IEM_MC_END();
11118 return VINF_SUCCESS;
11119
11120 case IEMMODE_32BIT:
11121 IEM_MC_BEGIN(0,1);
11122 IEM_MC_LOCAL(uint32_t, u32Tmp);
11123 IEM_MC_FETCH_GREG_U32(u32Tmp, X86_GREG_xAX);
11124 IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrMemOff, u32Tmp);
11125 IEM_MC_ADVANCE_RIP();
11126 IEM_MC_END();
11127 return VINF_SUCCESS;
11128
11129 case IEMMODE_64BIT:
11130 IEM_MC_BEGIN(0,1);
11131 IEM_MC_LOCAL(uint64_t, u64Tmp);
11132 IEM_MC_FETCH_GREG_U64(u64Tmp, X86_GREG_xAX);
11133 IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrMemOff, u64Tmp);
11134 IEM_MC_ADVANCE_RIP();
11135 IEM_MC_END();
11136 return VINF_SUCCESS;
11137
11138 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11139 }
11140}
11141
/** Macro used by iemOp_movsb_Xb_Yb and iemOp_movswd_Xv_Yv.
 *
 * Loads ValBits bits from [iEffSeg:xSI], stores them at ES:xDI, then steps
 * both xSI and xDI by ValBits/8 - downwards when EFLAGS.DF is set, upwards
 * otherwise.  AddrBits selects how much of xSI/xDI participates (the value
 * is zero-extended to 64 bits for the memory access). */
#define IEM_MOVS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(0, 2); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR,  uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, pIemCpu->iEffSeg, uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP(); \
    IEM_MC_END();
11160
/** Opcode 0xa4 - movsb: byte string move; REP forms are deferred to the
 *  C implementation, the plain form expands IEM_MOVS_CASE per address size. */
FNIEMOP_DEF(iemOp_movsb_Xb_Yb)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        /* REP and REPNE behave identically for MOVS, hence the combined test. */
        IEMOP_MNEMONIC("rep movsb Xb,Yb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr16, pIemCpu->iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr32, pIemCpu->iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr64, pIemCpu->iEffSeg);
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("movsb Xb,Yb");

    /*
     * Sharing case implementation with movs[wdq] below.
     */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_MOVS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_MOVS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_MOVS_CASE(8, 64); break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
11194
11195
11196/** Opcode 0xa5. */
11197FNIEMOP_DEF(iemOp_movswd_Xv_Yv)
11198{
11199 IEMOP_HLP_NO_LOCK_PREFIX();
11200
11201 /*
11202 * Use the C implementation if a repeat prefix is encountered.
11203 */
11204 if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
11205 {
11206 IEMOP_MNEMONIC("rep movs Xv,Yv");
11207 switch (pIemCpu->enmEffOpSize)
11208 {
11209 case IEMMODE_16BIT:
11210 switch (pIemCpu->enmEffAddrMode)
11211 {
11212 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr16, pIemCpu->iEffSeg);
11213 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr32, pIemCpu->iEffSeg);
11214 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr64, pIemCpu->iEffSeg);
11215 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11216 }
11217 break;
11218 case IEMMODE_32BIT:
11219 switch (pIemCpu->enmEffAddrMode)
11220 {
11221 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr16, pIemCpu->iEffSeg);
11222 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr32, pIemCpu->iEffSeg);
11223 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr64, pIemCpu->iEffSeg);
11224 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11225 }
11226 case IEMMODE_64BIT:
11227 switch (pIemCpu->enmEffAddrMode)
11228 {
11229 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6);
11230 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr32, pIemCpu->iEffSeg);
11231 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr64, pIemCpu->iEffSeg);
11232 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11233 }
11234 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11235 }
11236 }
11237 IEMOP_MNEMONIC("movs Xv,Yv");
11238
11239 /*
11240 * Annoying double switch here.
11241 * Using ugly macro for implementing the cases, sharing it with movsb.
11242 */
11243 switch (pIemCpu->enmEffOpSize)
11244 {
11245 case IEMMODE_16BIT:
11246 switch (pIemCpu->enmEffAddrMode)
11247 {
11248 case IEMMODE_16BIT: IEM_MOVS_CASE(16, 16); break;
11249 case IEMMODE_32BIT: IEM_MOVS_CASE(16, 32); break;
11250 case IEMMODE_64BIT: IEM_MOVS_CASE(16, 64); break;
11251 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11252 }
11253 break;
11254
11255 case IEMMODE_32BIT:
11256 switch (pIemCpu->enmEffAddrMode)
11257 {
11258 case IEMMODE_16BIT: IEM_MOVS_CASE(32, 16); break;
11259 case IEMMODE_32BIT: IEM_MOVS_CASE(32, 32); break;
11260 case IEMMODE_64BIT: IEM_MOVS_CASE(32, 64); break;
11261 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11262 }
11263 break;
11264
11265 case IEMMODE_64BIT:
11266 switch (pIemCpu->enmEffAddrMode)
11267 {
11268 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
11269 case IEMMODE_32BIT: IEM_MOVS_CASE(64, 32); break;
11270 case IEMMODE_64BIT: IEM_MOVS_CASE(64, 64); break;
11271 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11272 }
11273 break;
11274 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11275 }
11276 return VINF_SUCCESS;
11277}
11278
11279#undef IEM_MOVS_CASE
11280
/** Macro used by iemOp_cmpsb_Xb_Yb and iemOp_cmpswd_Xv_Yv.
 *
 * Compares the ValBits-wide value at [iEffSeg:xSI] against the one at
 * ES:xDI via the cmp assembly helper (updating EFLAGS only), then steps
 * both index registers by ValBits/8 according to EFLAGS.DF.
 *
 * Note: the stray trailing line-continuation backslash after IEM_MC_END()
 * has been dropped - it silently extended the macro onto the following
 * source line, which only worked because that line happened to be blank. */
#define IEM_CMPS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(3, 3); \
    IEM_MC_ARG(uint##ValBits##_t *, puValue1, 0); \
    IEM_MC_ARG(uint##ValBits##_t,   uValue2,  1); \
    IEM_MC_ARG(uint32_t *,          pEFlags,  2); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue1); \
    IEM_MC_LOCAL(RTGCPTR,  uAddr); \
    \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue1, pIemCpu->iEffSeg, uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue2, X86_SREG_ES, uAddr); \
    IEM_MC_REF_LOCAL(puValue1, uValue1); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puValue1, uValue2, pEFlags); \
    \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP(); \
    IEM_MC_END();
11307
11308/** Opcode 0xa6. */
11309FNIEMOP_DEF(iemOp_cmpsb_Xb_Yb)
11310{
11311 IEMOP_HLP_NO_LOCK_PREFIX();
11312
11313 /*
11314 * Use the C implementation if a repeat prefix is encountered.
11315 */
11316 if (pIemCpu->fPrefixes & IEM_OP_PRF_REPZ)
11317 {
11318 IEMOP_MNEMONIC("repe cmps Xb,Yb");
11319 switch (pIemCpu->enmEffAddrMode)
11320 {
11321 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr16, pIemCpu->iEffSeg);
11322 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr32, pIemCpu->iEffSeg);
11323 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr64, pIemCpu->iEffSeg);
11324 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11325 }
11326 }
11327 if (pIemCpu->fPrefixes & IEM_OP_PRF_REPNZ)
11328 {
11329 IEMOP_MNEMONIC("repe cmps Xb,Yb");
11330 switch (pIemCpu->enmEffAddrMode)
11331 {
11332 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr16, pIemCpu->iEffSeg);
11333 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr32, pIemCpu->iEffSeg);
11334 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr64, pIemCpu->iEffSeg);
11335 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11336 }
11337 }
11338 IEMOP_MNEMONIC("cmps Xb,Yb");
11339
11340 /*
11341 * Sharing case implementation with cmps[wdq] below.
11342 */
11343 switch (pIemCpu->enmEffAddrMode)
11344 {
11345 case IEMMODE_16BIT: IEM_CMPS_CASE(8, 16); break;
11346 case IEMMODE_32BIT: IEM_CMPS_CASE(8, 32); break;
11347 case IEMMODE_64BIT: IEM_CMPS_CASE(8, 64); break;
11348 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11349 }
11350 return VINF_SUCCESS;
11351
11352}
11353
11354
/** Opcode 0xa7 - cmpsw/cmpsd/cmpsq: word/dword/qword string compare.
 *  REPE/REPNE forms go to the C implementation; the plain forms expand
 *  IEM_CMPS_CASE for each operand size / address size combination. */
FNIEMOP_DEF(iemOp_cmpswd_Xv_Yv)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC("repe cmps Xv,Yv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            /* NOTE(review): no 'break' here - the fall-through is unreachable since every
               inner-switch path returns, but adding one would be safer (cf. 16-bit case). */
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_4);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    if (pIemCpu->fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC("repne cmps Xv,Yv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            /* NOTE(review): no 'break' here either - unreachable fall-through, same as above. */
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_2);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    IEMOP_MNEMONIC("cmps Xv,Yv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with cmpsb.
     */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_CMPS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_CMPS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;

}
11473
11474#undef IEM_CMPS_CASE
11475
/** Opcode 0xa8 - test AL,Ib: delegates to the common AL,Ib binary-operator
 *  helper with the 'test' implementation table (EFLAGS-only result).
 *  AF is declared undefined for verification purposes. */
FNIEMOP_DEF(iemOp_test_AL_Ib)
{
    IEMOP_MNEMONIC("test al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_test);
}
11483
11484
/** Opcode 0xa9 - test rAX,Iz: delegates to the common rAX,Iz binary-operator
 *  helper with the 'test' implementation table (EFLAGS-only result).
 *  AF is declared undefined for verification purposes. */
FNIEMOP_DEF(iemOp_test_eAX_Iz)
{
    IEMOP_MNEMONIC("test rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_test);
}
11492
11493
/** Macro used by iemOp_stosb_Yb_AL and iemOp_stoswd_Yv_eAX.
 *
 * Stores the low ValBits bits of rAX at ES:xDI and steps xDI by ValBits/8
 * according to EFLAGS.DF.
 *
 * Note: the stray trailing line-continuation backslash after IEM_MC_END()
 * has been dropped - it silently extended the macro onto the following
 * source line, which only worked because that line happened to be blank. */
#define IEM_STOS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(0, 2); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR,  uAddr); \
    IEM_MC_FETCH_GREG_U##ValBits(uValue, X86_GREG_xAX); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr,  X86_GREG_xDI); \
    IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP(); \
    IEM_MC_END();
11509
/** Opcode 0xaa - stosb: byte string store; REP forms are deferred to the
 *  C implementation, the plain form expands IEM_STOS_CASE per address size. */
FNIEMOP_DEF(iemOp_stosb_Yb_AL)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        /* REP and REPNE behave identically for STOS, hence the combined test. */
        IEMOP_MNEMONIC("rep stos Yb,al");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m16);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m32);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m64);
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("stos Yb,al");

    /*
     * Sharing case implementation with stos[wdq] below.
     */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_STOS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_STOS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_STOS_CASE(8, 64); break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
11543
11544
/** Opcode 0xab - stosw/stosd/stosq: word/dword/qword string store.  REP forms
 *  go to the C implementation; the plain forms expand IEM_STOS_CASE for each
 *  operand size / address size combination. */
FNIEMOP_DEF(iemOp_stoswd_Yv_eAX)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep stos Yv,rAX");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            /* NOTE(review): no 'break' here - the fall-through is unreachable since every
               inner-switch path returns, but adding one would be safer (cf. 16-bit case). */
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_9);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("stos Yv,rAX");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with stosb.
     */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_STOS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_STOS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_STOS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_STOS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_STOS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_STOS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_STOS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_STOS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
11627
11628#undef IEM_STOS_CASE
11629
/** Macro used by iemOp_lodsb_AL_Xb and iemOp_lodswd_eAX_Xv.
 *
 * Loads ValBits bits from [iEffSeg:xSI] into the low ValBits of rAX and
 * steps xSI by ValBits/8 according to EFLAGS.DF. */
#define IEM_LODS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(0, 2); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR,  uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, pIemCpu->iEffSeg, uAddr); \
    IEM_MC_STORE_GREG_U##ValBits(X86_GREG_xAX, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP(); \
    IEM_MC_END();
11645
/** Opcode 0xac - lodsb: byte string load; REP forms are deferred to the
 *  C implementation, the plain form expands IEM_LODS_CASE per address size. */
FNIEMOP_DEF(iemOp_lodsb_AL_Xb)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        /* REP and REPNE behave identically for LODS, hence the combined test. */
        IEMOP_MNEMONIC("rep lodsb al,Xb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m16, pIemCpu->iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m32, pIemCpu->iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m64, pIemCpu->iEffSeg);
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("lodsb al,Xb");

    /*
     * Sharing case implementation with lods[wdq] below.
     */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_LODS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_LODS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_LODS_CASE(8, 64); break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
11679
11680
/** Opcode 0xad - lodsw/lodsd/lodsq: word/dword/qword string load.  REP forms
 *  go to the C implementation; the plain forms expand IEM_LODS_CASE for each
 *  operand size / address size combination. */
FNIEMOP_DEF(iemOp_lodswd_eAX_Xv)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep lods rAX,Xv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            /* NOTE(review): no 'break' here - the fall-through is unreachable since every
               inner-switch path returns, but adding one would be safer (cf. 16-bit case). */
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_7);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("lods rAX,Xv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with lodsb.
     */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_LODS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_LODS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_LODS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_LODS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_LODS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
11763
11764#undef IEM_LODS_CASE
11765
/** Macro used by iemOp_scasb_AL_Xb and iemOp_scaswd_eAX_Xv.
 *
 * Compares the low ValBits of rAX against the value at ES:xDI via the cmp
 * assembly helper (updating EFLAGS only), then steps xDI by ValBits/8
 * according to EFLAGS.DF. */
#define IEM_SCAS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(3, 2); \
    IEM_MC_ARG(uint##ValBits##_t *, puRax,   0); \
    IEM_MC_ARG(uint##ValBits##_t,   uValue,  1); \
    IEM_MC_ARG(uint32_t *,          pEFlags, 2); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, X86_SREG_ES, uAddr); \
    IEM_MC_REF_GREG_U##ValBits(puRax, X86_GREG_xAX); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puRax, uValue, pEFlags); \
    \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP(); \
    IEM_MC_END();
11787
/** Opcode 0xae - scasb: byte string scan; REPE/REPNE forms are deferred to
 *  the C implementation, the plain form expands IEM_SCAS_CASE. */
FNIEMOP_DEF(iemOp_scasb_AL_Xb)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC("repe scasb al,Xb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m16);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m32);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m64);
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC("repne scasb al,Xb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m16);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m32);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m64);
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("scasb al,Xb");

    /*
     * Sharing case implementation with scas[wdq] below.
     */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_SCAS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_SCAS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_SCAS_CASE(8, 64); break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
11832
11833
/** Opcode 0xaf - scasw/scasd/scasq: word/dword/qword string scan.
 *  REPE/REPNE forms go to the C implementation; the plain forms expand
 *  IEM_SCAS_CASE for each operand size / address size combination. */
FNIEMOP_DEF(iemOp_scaswd_eAX_Xv)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC("repe scas rAX,Xv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            /* NOTE(review): no 'break' here - the fall-through is unreachable since every
               inner-switch path returns, but adding one would be safer (cf. 16-bit case). */
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /** @todo Is this wrong? We can do 16-bit addressing in 64-bit mode, but not 32-bit. right? */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC("repne scas rAX,Xv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            /* NOTE(review): no 'break' here either - unreachable fall-through, same as above. */
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_5);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("scas rAX,Xv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with scasb.
     */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_SCAS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_SCAS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_SCAS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_SCAS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_SCAS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_SCAS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_SCAS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_SCAS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;

}
11949
11950#undef IEM_SCAS_CASE
11951
/**
 * Common 'mov r8, imm8' helper.
 *
 * Fetches the imm8 operand and stores it into the given 8-bit register.
 *
 * @param   iReg    Encoded register index (REX.B already merged in by the
 *                  callers); indices 4-7 presumably map to AH..BH when no REX
 *                  prefix is present, resolved inside IEM_MC_STORE_GREG_U8 -
 *                  TODO confirm.
 */
FNIEMOP_DEF_1(iemOpCommonMov_r8_Ib, uint8_t, iReg)
{
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL_CONST(uint8_t, u8Value,/*=*/ u8Imm);
    IEM_MC_STORE_GREG_U8(iReg, u8Value);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
11968
11969
/** Opcode 0xb0 - mov AL,Ib (encoded register index 0, REX.B merged in). */
FNIEMOP_DEF(iemOp_mov_AL_Ib)
{
    IEMOP_MNEMONIC("mov AL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xAX | pIemCpu->uRexB);
}
11976
11977
/** Opcode 0xb1 - mov CL,Ib.
 *  NOTE(review): function name lacks the 'mov_' prefix used by the 0xb0/0xb4
 *  siblings; renaming would require touching the one-byte opcode table. */
FNIEMOP_DEF(iemOp_CL_Ib)
{
    IEMOP_MNEMONIC("mov CL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xCX | pIemCpu->uRexB);
}
11984
11985
/** Opcode 0xb2 - mov DL,Ib (encoded register index 2, REX.B merged in). */
FNIEMOP_DEF(iemOp_DL_Ib)
{
    IEMOP_MNEMONIC("mov DL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDX | pIemCpu->uRexB);
}
11992
11993
/** Opcode 0xb3 - mov BL,Ib (encoded register index 3, REX.B merged in). */
FNIEMOP_DEF(iemOp_BL_Ib)
{
    IEMOP_MNEMONIC("mov BL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBX | pIemCpu->uRexB);
}
12000
12001
/** Opcode 0xb4 - mov AH,Ib.
 * Register index 4 (X86_GREG_xSP) addresses AH without a REX prefix and SPL
 * (or R12L with REX.B) with one; the U8 register accessors resolve this. */
FNIEMOP_DEF(iemOp_mov_AH_Ib)
{
    IEMOP_MNEMONIC("mov AH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP | pIemCpu->uRexB);
}
12008
12009
/** Opcode 0xb5 - mov CH,Ib.
 * Index 5 is CH without REX, BPL/R13L with REX (resolved by the accessors). */
FNIEMOP_DEF(iemOp_CH_Ib)
{
    IEMOP_MNEMONIC("mov CH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBP | pIemCpu->uRexB);
}
12016
12017
/** Opcode 0xb6 - mov DH,Ib.
 * Index 6 is DH without REX, SIL/R14L with REX (resolved by the accessors). */
FNIEMOP_DEF(iemOp_DH_Ib)
{
    IEMOP_MNEMONIC("mov DH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSI | pIemCpu->uRexB);
}
12024
12025
/** Opcode 0xb7 - mov BH,Ib.
 * Index 7 is BH without REX, DIL/R15L with REX (resolved by the accessors). */
FNIEMOP_DEF(iemOp_BH_Ib)
{
    IEMOP_MNEMONIC("mov BH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDI | pIemCpu->uRexB);
}
12032
12033
12034/**
12035 * Common 'mov regX,immX' helper.
12036 */
12037FNIEMOP_DEF_1(iemOpCommonMov_Rv_Iv, uint8_t, iReg)
12038{
12039 switch (pIemCpu->enmEffOpSize)
12040 {
12041 case IEMMODE_16BIT:
12042 {
12043 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
12044 IEMOP_HLP_NO_LOCK_PREFIX();
12045
12046 IEM_MC_BEGIN(0, 1);
12047 IEM_MC_LOCAL_CONST(uint16_t, u16Value,/*=*/ u16Imm);
12048 IEM_MC_STORE_GREG_U16(iReg, u16Value);
12049 IEM_MC_ADVANCE_RIP();
12050 IEM_MC_END();
12051 break;
12052 }
12053
12054 case IEMMODE_32BIT:
12055 {
12056 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
12057 IEMOP_HLP_NO_LOCK_PREFIX();
12058
12059 IEM_MC_BEGIN(0, 1);
12060 IEM_MC_LOCAL_CONST(uint32_t, u32Value,/*=*/ u32Imm);
12061 IEM_MC_STORE_GREG_U32(iReg, u32Value);
12062 IEM_MC_ADVANCE_RIP();
12063 IEM_MC_END();
12064 break;
12065 }
12066 case IEMMODE_64BIT:
12067 {
12068 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm); /* 64-bit immediate! */
12069 IEMOP_HLP_NO_LOCK_PREFIX();
12070
12071 IEM_MC_BEGIN(0, 1);
12072 IEM_MC_LOCAL_CONST(uint64_t, u64Value,/*=*/ u64Imm);
12073 IEM_MC_STORE_GREG_U64(iReg, u64Value);
12074 IEM_MC_ADVANCE_RIP();
12075 IEM_MC_END();
12076 break;
12077 }
12078 }
12079
12080 return VINF_SUCCESS;
12081}
12082
12083
/** Opcode 0xb8 - mov rAX,Iv (R8 with REX.B). */
FNIEMOP_DEF(iemOp_eAX_Iv)
{
    IEMOP_MNEMONIC("mov rAX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xAX | pIemCpu->uRexB);
}
12090
12091
/** Opcode 0xb9 - mov rCX,Iv (R9 with REX.B). */
FNIEMOP_DEF(iemOp_eCX_Iv)
{
    IEMOP_MNEMONIC("mov rCX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xCX | pIemCpu->uRexB);
}
12098
12099
/** Opcode 0xba - mov rDX,Iv (R10 with REX.B). */
FNIEMOP_DEF(iemOp_eDX_Iv)
{
    IEMOP_MNEMONIC("mov rDX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDX | pIemCpu->uRexB);
}
12106
12107
/** Opcode 0xbb - mov rBX,Iv (R11 with REX.B). */
FNIEMOP_DEF(iemOp_eBX_Iv)
{
    IEMOP_MNEMONIC("mov rBX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBX | pIemCpu->uRexB);
}
12114
12115
/** Opcode 0xbc - mov rSP,Iv (R12 with REX.B). */
FNIEMOP_DEF(iemOp_eSP_Iv)
{
    IEMOP_MNEMONIC("mov rSP,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSP | pIemCpu->uRexB);
}
12122
12123
/** Opcode 0xbd - mov rBP,Iv (R13 with REX.B). */
FNIEMOP_DEF(iemOp_eBP_Iv)
{
    IEMOP_MNEMONIC("mov rBP,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBP | pIemCpu->uRexB);
}
12130
12131
/** Opcode 0xbe - mov rSI,Iv (R14 with REX.B). */
FNIEMOP_DEF(iemOp_eSI_Iv)
{
    IEMOP_MNEMONIC("mov rSI,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSI | pIemCpu->uRexB);
}
12138
12139
/** Opcode 0xbf - mov rDI,Iv (R15 with REX.B). */
FNIEMOP_DEF(iemOp_eDI_Iv)
{
    IEMOP_MNEMONIC("mov rDI,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDI | pIemCpu->uRexB);
}
12146
12147
/**
 * Opcode 0xc0 - group 2 rotate/shift Eb,Ib.
 *
 * The ModR/M reg field selects the operation (rol/ror/rcl/rcr/shl/shr/sar);
 * /6 is undefined and raises \#UD.  The imm8 count form needs a 186+.
 * OF and AF are undefined for multi-bit shifts, hence the verification hint.
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_Ib)
{
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Eb,Ib"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Eb,Ib"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Eb,Ib"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Eb,Ib"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Eb,Ib"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Eb,Ib"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Eb,Ib"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEMOP_HLP_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,       pu8Dst,            0);
        IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1);
        IEM_MC_ARG(uint32_t *,      pEFlags,           2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,   pu8Dst,    0);
        IEM_MC_ARG(uint8_t,     cShiftArg,  1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        /* The 1 tells the effective-address calc that one more opcode byte
           (the imm8 count) follows the ModR/M bytes. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEM_MC_ASSIGN(cShiftArg, cShift);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
12207
12208
/**
 * Opcode 0xc1 - group 2 rotate/shift Ev,Ib.
 *
 * Word/dword/qword variant of 0xc0; the ModR/M reg field selects the
 * operation, /6 raises \#UD.  OF and AF are undefined for multi-bit counts.
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_Ib)
{
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Ev,Ib"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Ev,Ib"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Ev,Ib"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Ev,Ib"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Ev,Ib"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Ev,Ib"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Ev,Ib"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEMOP_HLP_NO_LOCK_PREFIX();
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /* AMD64: 32-bit write zeroes bits 63:32 */
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,  pu16Dst,    0);
                IEM_MC_ARG(uint8_t,     cShiftArg,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = trailing imm8 byte */
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,  pu32Dst,    0);
                IEM_MC_ARG(uint8_t,     cShiftArg,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = trailing imm8 byte */
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,  pu64Dst,    0);
                IEM_MC_ARG(uint8_t,     cShiftArg,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = trailing imm8 byte */
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12346
12347
/** Opcode 0xc2 - retn Iw.
 * Near return popping an extra imm16 bytes of stack parameters; defaults to
 * 64-bit operand size in long mode. */
FNIEMOP_DEF(iemOp_retn_Iw)
{
    IEMOP_MNEMONIC("retn Iw");
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pIemCpu->enmEffOpSize, u16Imm);
}
12357
12358
/** Opcode 0xc3 - retn.
 * Plain near return (same cimpl as 0xc2 with a zero pop count). */
FNIEMOP_DEF(iemOp_retn)
{
    IEMOP_MNEMONIC("retn");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pIemCpu->enmEffOpSize, 0);
}
12367
12368
/** Opcode 0xc4 - les Gv,Mp / 2-byte VEX prefix. */
FNIEMOP_DEF(iemOp_les_Gv_Mp_vex2)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (   pIemCpu->enmCpuMode == IEMMODE_64BIT
        || (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC("2-byte-vex");
        /* The LES instruction is invalid in 64-bit mode.  In legacy and
           compatibility mode it is invalid with MOD=3.
           The use as a VEX prefix is made possible by assigning the inverted
           REX.R to the top MOD bit, and the top bit in the inverted register
           specifier to the bottom MOD bit, thereby effectively limiting 32-bit
           to accessing registers 0..7 in this VEX form. */
        /** @todo VEX: Just use new tables for it. */
        return IEMOP_RAISE_INVALID_OPCODE(); /* VEX decoding not implemented yet. */
    }
    IEMOP_MNEMONIC("les Gv,Mp");
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_ES, bRm);
}
12389
12390
/** Opcode 0xc5 - lds Gv,Mp / 3-byte VEX prefix. */
FNIEMOP_DEF(iemOp_lds_Gv_Mp_vex3)
{
    /* The LDS instruction is invalid in 64-bit mode.  In legacy and
       compatibility mode it is invalid with MOD=3.
       The use as a VEX prefix is made possible by assigning the inverted
       REX.R and REX.X to the two MOD bits, since the REX bits are ignored
       outside of 64-bit mode.  VEX is not available in real or v86 mode. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pIemCpu->enmCpuMode != IEMMODE_64BIT)
    {
        if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        {
            IEMOP_MNEMONIC("lds Gv,Mp");
            return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_DS, bRm);
        }
        IEMOP_HLP_NO_REAL_OR_V86_MODE();
    }

    IEMOP_MNEMONIC("3-byte-vex");
    /** @todo Test when exactly the VEX conformance checks kick in during
     *        instruction decoding and fetching (using \#PF). */
    uint8_t bVex1;   IEM_OPCODE_GET_NEXT_U8(&bVex1);
    uint8_t bVex2;   IEM_OPCODE_GET_NEXT_U8(&bVex2);
    uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
#if 0 /* will make sense of this next week... */
    /* NOTE(review): incomplete WIP - the condition is unfinished and
       IEM_OP_PRF_REPZ is listed twice; verify intent before enabling. */
    if (   !(pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REX))
        &&
        )
    {

    }
#endif

    /** @todo VEX: Just use new tables for it. */
    return IEMOP_RAISE_INVALID_OPCODE();
}
12428
12429
/** Opcode 0xc6 - group 11, only /0 (mov Eb,Ib) is defined; /1../7 raise \#UD. */
FNIEMOP_DEF(iemOp_Grp11_Eb_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
        return IEMOP_RAISE_INVALID_OPCODE();
    IEMOP_MNEMONIC("mov Eb,Ib");

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_BEGIN(0, 0);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u8Imm);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = trailing imm8 byte */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_STORE_MEM_U8(pIemCpu->iEffSeg, GCPtrEffDst, u8Imm);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
12461
12462
/** Opcode 0xc7 - group 11, only /0 (mov Ev,Iz) is defined; /1../7 raise \#UD.
 * In 64-bit operand size the immediate is 32-bit, sign-extended to 64 bits. */
FNIEMOP_DEF(iemOp_Grp11_Ev_Iz)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Ev,Iz in this group. */
        return IEMOP_RAISE_INVALID_OPCODE();
    IEMOP_MNEMONIC("mov Ev,Iz");

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 0);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 0);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 0);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); /* imm32 sign-extended */
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); /* 2 = trailing imm16 bytes */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* 4 = trailing imm32 bytes */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrEffDst, u32Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* still a 4-byte imm32 in 64-bit mode */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); /* imm32 sign-extended */
                IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffDst, u64Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12543
12544
12545
12546
/** Opcode 0xc8 - enter Iw,Ib.
 * Stack frame setup: imm16 frame size, imm8 nesting level; 186+ instruction,
 * defaults to 64-bit operand size in long mode. */
FNIEMOP_DEF(iemOp_enter_Iw_Ib)
{
    IEMOP_MNEMONIC("enter Iw,Ib");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEMOP_HLP_NO_LOCK_PREFIX();
    uint16_t cbFrame;        IEM_OPCODE_GET_NEXT_U16(&cbFrame);
    uint8_t  u8NestingLevel; IEM_OPCODE_GET_NEXT_U8(&u8NestingLevel);
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_enter, pIemCpu->enmEffOpSize, cbFrame, u8NestingLevel);
}
12558
12559
12560/** Opcode 0xc9. */
12561FNIEMOP_DEF(iemOp_leave)
12562{
12563 IEMOP_MNEMONIC("retn");
12564 IEMOP_HLP_MIN_186();
12565 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
12566 IEMOP_HLP_NO_LOCK_PREFIX();
12567 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_leave, pIemCpu->enmEffOpSize);
12568}
12569
12570
/** Opcode 0xca - retf Iw.
 * Far return popping an extra imm16 bytes of stack parameters. */
FNIEMOP_DEF(iemOp_retf_Iw)
{
    IEMOP_MNEMONIC("retf Iw");
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pIemCpu->enmEffOpSize, u16Imm);
}
12580
12581
/** Opcode 0xcb - retf.
 * Plain far return (same cimpl as 0xca with a zero pop count). */
FNIEMOP_DEF(iemOp_retf)
{
    IEMOP_MNEMONIC("retf");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pIemCpu->enmEffOpSize, 0);
}
12590
12591
/** Opcode 0xcc - int3.
 * Breakpoint: dispatches \#BP via the common software-interrupt cimpl with
 * fIsBpInstr set (affects IOPL/DPL checks in the cimpl). */
FNIEMOP_DEF(iemOp_int_3)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_BP, true /*fIsBpInstr*/);
}
12598
12599
/** Opcode 0xcd - int Ib.
 * Software interrupt with the vector taken from the immediate byte. */
FNIEMOP_DEF(iemOp_int_Ib)
{
    uint8_t u8Int; IEM_OPCODE_GET_NEXT_U8(&u8Int);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, u8Int, false /*fIsBpInstr*/);
}
12607
12608
/** Opcode 0xce - into.
 * Invalid in 64-bit mode (IEMOP_HLP_NO_64BIT).  Unconditionally defers to
 * iemCImpl_int with vector 4 (X86_XCPT_OF); the OF-conditional raising is
 * presumably handled inside the cimpl - not visible here. */
FNIEMOP_DEF(iemOp_into)
{
    IEMOP_MNEMONIC("into");
    IEMOP_HLP_NO_64BIT();

    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG_CONST(uint8_t,   u8Int,      /*=*/ X86_XCPT_OF, 0);
    IEM_MC_ARG_CONST(bool,      fIsBpInstr, /*=*/ false,       1);
    IEM_MC_CALL_CIMPL_2(iemCImpl_int, u8Int, fIsBpInstr);
    IEM_MC_END();
    return VINF_SUCCESS;
}
12622
12623
/** Opcode 0xcf - iret.
 * Interrupt return; fully deferred to the cimpl with the effective op size. */
FNIEMOP_DEF(iemOp_iret)
{
    IEMOP_MNEMONIC("iret");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_iret, pIemCpu->enmEffOpSize);
}
12631
12632
/**
 * Opcode 0xd0 - group 2 rotate/shift Eb,1.
 *
 * Same operation table as 0xc0 but with an implicit shift count of 1;
 * /6 raises \#UD.  OF/AF flagged as verification-undefined.
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Eb,1"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Eb,1"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Eb,1"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Eb,1"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Eb,1"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Eb,1"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Eb,1"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
        IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=*/1,   1);
        IEM_MC_ARG(uint32_t *,      pEFlags,            2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
        IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=*/1,   1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,        2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); /* no opcode bytes follow */
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
12688
12689
12690
/**
 * Opcode 0xd1 - group 2 rotate/shift Ev,1.
 *
 * Word/dword/qword variant of 0xd0 with an implicit shift count of 1;
 * /6 raises \#UD.  OF/AF flagged as verification-undefined.
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Ev,1"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Ev,1"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Ev,1"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Ev,1"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Ev,1"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Ev,1"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Ev,1"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_NO_LOCK_PREFIX();
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /* AMD64: 32-bit write zeroes bits 63:32 */
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,      pu16Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,        2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); /* no opcode bytes follow */
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,      pu32Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,        2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); /* no opcode bytes follow */
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,      pu64Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,        2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); /* no opcode bytes follow */
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12820
12821
/**
 * Opcode 0xd2 - group 2 rotate/shift Eb,CL.
 *
 * Byte rotate/shift with the count taken from CL; /6 raises \#UD.
 * OF/AF flagged as verification-undefined.
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Eb,CL"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Eb,CL"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Eb,CL"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Eb,CL"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Eb,CL"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Eb,CL"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Eb,CL"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc, grr. */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,   pu8Dst,     0);
        IEM_MC_ARG(uint8_t,     cShiftArg,  1);
        IEM_MC_ARG(uint32_t *,  pEFlags,    2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* the CL count */
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,   pu8Dst,     0);
        IEM_MC_ARG(uint8_t,     cShiftArg,  1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); /* no opcode bytes follow */
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* the CL count */
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
12879
12880
/** Opcode 0xd3. Group 2: rotate/shift Ev (16/32/64-bit r/m) by CL. */
FNIEMOP_DEF(iemOp_Grp2_Ev_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* The ModR/M reg field selects which group-2 operation to perform;
       /6 is unassigned and raises #UD. */
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Ev,CL"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Ev,CL"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Ev,CL"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Ev,CL"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Ev,CL"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Ev,CL"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Ev,CL"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register destination: one IEM_MC block per effective operand size. */
        IEMOP_HLP_NO_LOCK_PREFIX();
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* shift count comes from CL */
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit GPR writes zero the upper half in long mode. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory destination: map r/m read-write, shift in place, commit. */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
13016
/** Opcode 0xd4. AAM - ASCII adjust AX after multiply; imm8 is the divisor. */
FNIEMOP_DEF(iemOp_aam_Ib)
{
    IEMOP_MNEMONIC("aam Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT(); /* invalid in 64-bit mode */
    if (!bImm) /* a zero divisor raises #DE */
        return IEMOP_RAISE_DIVIDE_ERROR();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aam, bImm);
}
13028
13029
/** Opcode 0xd5. AAD - ASCII adjust AX before division; imm8 is the base. */
FNIEMOP_DEF(iemOp_aad_Ib)
{
    IEMOP_MNEMONIC("aad Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT(); /* invalid in 64-bit mode */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aad, bImm);
}
13039
13040
13041/** Opcode 0xd6. */
13042FNIEMOP_DEF(iemOp_salc)
13043{
13044 IEMOP_MNEMONIC("salc");
13045 IEMOP_HLP_MIN_286(); /* (undocument at the time) */
13046 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
13047 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13048 IEMOP_HLP_NO_64BIT();
13049
13050 IEM_MC_BEGIN(0, 0);
13051 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
13052 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0xff);
13053 } IEM_MC_ELSE() {
13054 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0x00);
13055 } IEM_MC_ENDIF();
13056 IEM_MC_ADVANCE_RIP();
13057 IEM_MC_END();
13058 return VINF_SUCCESS;
13059}
13060
13061
/** Opcode 0xd7. XLAT - AL = [xBX + zero-extended AL] in the effective data segment. */
FNIEMOP_DEF(iemOp_xlat)
{
    IEMOP_MNEMONIC("xlat");
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* The width of the xBX base follows the effective address size. */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t, u8Tmp);
            IEM_MC_LOCAL(uint16_t, u16Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U16(u16Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U16_TO_LOCAL(u16Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM16_U8(u8Tmp, pIemCpu->iEffSeg, u16Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t, u8Tmp);
            IEM_MC_LOCAL(uint32_t, u32Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U32(u32Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U32_TO_LOCAL(u32Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM32_U8(u8Tmp, pIemCpu->iEffSeg, u32Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t, u8Tmp);
            IEM_MC_LOCAL(uint64_t, u64Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U64(u64Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U64_TO_LOCAL(u64Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM_U8(u8Tmp, pIemCpu->iEffSeg, u64Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
13108
13109
13110/**
13111 * Common worker for FPU instructions working on ST0 and STn, and storing the
13112 * result in ST0.
13113 *
13114 * @param pfnAImpl Pointer to the instruction implementation (assembly).
13115 */
13116FNIEMOP_DEF_2(iemOpHlpFpu_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
13117{
13118 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13119
13120 IEM_MC_BEGIN(3, 1);
13121 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
13122 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
13123 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
13124 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
13125
13126 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13127 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13128 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
13129 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
13130 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
13131 IEM_MC_ELSE()
13132 IEM_MC_FPU_STACK_UNDERFLOW(0);
13133 IEM_MC_ENDIF();
13134 IEM_MC_USED_FPU();
13135 IEM_MC_ADVANCE_RIP();
13136
13137 IEM_MC_END();
13138 return VINF_SUCCESS;
13139}
13140
13141
13142/**
13143 * Common worker for FPU instructions working on ST0 and STn, and only affecting
13144 * flags.
13145 *
13146 * @param pfnAImpl Pointer to the instruction implementation (assembly).
13147 */
13148FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
13149{
13150 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13151
13152 IEM_MC_BEGIN(3, 1);
13153 IEM_MC_LOCAL(uint16_t, u16Fsw);
13154 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
13155 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
13156 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
13157
13158 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13159 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13160 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
13161 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
13162 IEM_MC_UPDATE_FSW(u16Fsw);
13163 IEM_MC_ELSE()
13164 IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
13165 IEM_MC_ENDIF();
13166 IEM_MC_USED_FPU();
13167 IEM_MC_ADVANCE_RIP();
13168
13169 IEM_MC_END();
13170 return VINF_SUCCESS;
13171}
13172
13173
13174/**
13175 * Common worker for FPU instructions working on ST0 and STn, only affecting
13176 * flags, and popping when done.
13177 *
13178 * @param pfnAImpl Pointer to the instruction implementation (assembly).
13179 */
13180FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
13181{
13182 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13183
13184 IEM_MC_BEGIN(3, 1);
13185 IEM_MC_LOCAL(uint16_t, u16Fsw);
13186 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
13187 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
13188 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
13189
13190 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13191 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13192 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
13193 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
13194 IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
13195 IEM_MC_ELSE()
13196 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(UINT8_MAX);
13197 IEM_MC_ENDIF();
13198 IEM_MC_USED_FPU();
13199 IEM_MC_ADVANCE_RIP();
13200
13201 IEM_MC_END();
13202 return VINF_SUCCESS;
13203}
13204
13205
/** Opcode 0xd8 11/0. FADD ST(0),ST(i) - ST0 += STn. */
FNIEMOP_DEF_1(iemOp_fadd_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fadd st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fadd_r80_by_r80);
}
13212
13213
/** Opcode 0xd8 11/1. FMUL ST(0),ST(i) - ST0 *= STn. */
FNIEMOP_DEF_1(iemOp_fmul_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fmul st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fmul_r80_by_r80);
}
13220
13221
/** Opcode 0xd8 11/2. FCOM ST(0),ST(i) - compare, FSW only. */
FNIEMOP_DEF_1(iemOp_fcom_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fcom_r80_by_r80);
}
13228
13229
/** Opcode 0xd8 11/3. FCOMP ST(0),ST(i) - compare and pop; same worker as FCOM. */
FNIEMOP_DEF_1(iemOp_fcomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fcom_r80_by_r80);
}
13236
13237
/** Opcode 0xd8 11/4. FSUB ST(0),ST(i) - ST0 -= STn. */
FNIEMOP_DEF_1(iemOp_fsub_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsub st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsub_r80_by_r80);
}
13244
13245
/** Opcode 0xd8 11/5. FSUBR ST(0),ST(i) - ST0 = STn - ST0. */
FNIEMOP_DEF_1(iemOp_fsubr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsubr_r80_by_r80);
}
13252
13253
/** Opcode 0xd8 11/6. FDIV ST(0),ST(i) - ST0 /= STn. */
FNIEMOP_DEF_1(iemOp_fdiv_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdiv st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdiv_r80_by_r80);
}
13260
13261
/** Opcode 0xd8 11/7. FDIVR ST(0),ST(i) - ST0 = STn / ST0. */
FNIEMOP_DEF_1(iemOp_fdivr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdivr_r80_by_r80);
}
13268
13269
13270/**
13271 * Common worker for FPU instructions working on ST0 and an m32r, and storing
13272 * the result in ST0.
13273 *
13274 * @param pfnAImpl Pointer to the instruction implementation (assembly).
13275 */
13276FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32r, uint8_t, bRm, PFNIEMAIMPLFPUR32, pfnAImpl)
13277{
13278 IEM_MC_BEGIN(3, 3);
13279 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13280 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
13281 IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
13282 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
13283 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
13284 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);
13285
13286 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13287 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13288
13289 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13290 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13291 IEM_MC_FETCH_MEM_R32(r32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);
13292
13293 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
13294 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr32Val2);
13295 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
13296 IEM_MC_ELSE()
13297 IEM_MC_FPU_STACK_UNDERFLOW(0);
13298 IEM_MC_ENDIF();
13299 IEM_MC_USED_FPU();
13300 IEM_MC_ADVANCE_RIP();
13301
13302 IEM_MC_END();
13303 return VINF_SUCCESS;
13304}
13305
13306
/** Opcode 0xd8 !11/0. FADD ST(0),m32r - ST0 += 32-bit real from memory. */
FNIEMOP_DEF_1(iemOp_fadd_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fadd st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fadd_r80_by_r32);
}
13313
13314
/** Opcode 0xd8 !11/1. FMUL ST(0),m32r - ST0 *= 32-bit real from memory. */
FNIEMOP_DEF_1(iemOp_fmul_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fmul st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fmul_r80_by_r32);
}
13321
13322
/** Opcode 0xd8 !11/2. FCOM ST(0),m32r - compare ST0 with a 32-bit real; FSW only. */
FNIEMOP_DEF_1(iemOp_fcom_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcom st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        /* Memory-operand variants also record FPU DP/DS. */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13355
13356
/** Opcode 0xd8 !11/3. FCOMP ST(0),m32r - compare with a 32-bit real, then pop. */
FNIEMOP_DEF_1(iemOp_fcomp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcomp st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        /* Same as FCOM m32r but pops the stack after updating FSW. */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13389
13390
/** Opcode 0xd8 !11/4. FSUB ST(0),m32r - ST0 -= 32-bit real from memory. */
FNIEMOP_DEF_1(iemOp_fsub_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsub st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsub_r80_by_r32);
}
13397
13398
/** Opcode 0xd8 !11/5. FSUBR ST(0),m32r - ST0 = m32r - ST0. */
FNIEMOP_DEF_1(iemOp_fsubr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsubr_r80_by_r32);
}
13405
13406
/** Opcode 0xd8 !11/6. FDIV ST(0),m32r - ST0 /= 32-bit real from memory. */
FNIEMOP_DEF_1(iemOp_fdiv_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdiv st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdiv_r80_by_r32);
}
13413
13414
/** Opcode 0xd8 !11/7. FDIVR ST(0),m32r - ST0 = m32r / ST0. */
FNIEMOP_DEF_1(iemOp_fdivr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdivr_r80_by_r32);
}
13421
13422
/** Opcode 0xd8. FPU escape group 0: dispatches on the ModR/M reg field;
 * register forms (mod==3) operate on STn, memory forms on m32r. */
FNIEMOP_DEF(iemOp_EscF0)
{
    /* Remember where the ESC opcode starts for FPU FOP/FIP reporting. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_stN, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_stN, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_stN, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m32r, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m32r, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m32r, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m32r, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m32r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m32r, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m32r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
13460
13461
/** Opcode 0xd9 /0 mem32real.
 * FLD m32r - convert a 32-bit real to 80-bit and push it onto the FPU stack.
 * @sa iemOp_fld_m64r */
FNIEMOP_DEF_1(iemOp_fld_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fld m32r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val, r32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* A push lands in ST7 (pre-decrement), so that register must be free. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r32_to_r80, pFpuRes, pr32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13494
13495
/** Opcode 0xd9 !11/2 mem32real.
 * FST m32r - store ST0 to memory as a 32-bit real (no pop). */
FNIEMOP_DEF_1(iemOp_fst_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fst m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        /* Commit only if the store did not raise an unmasked exception. */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST0: with IM masked, write the indefinite QNaN instead. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13530
13531
/** Opcode 0xd9 !11/3.
 * FSTP m32r - store ST0 to memory as a 32-bit real and pop the stack. */
FNIEMOP_DEF_1(iemOp_fstp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fstp m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        /* Commit only if the store did not raise an unmasked exception. */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST0: with IM masked, write the indefinite QNaN instead. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13566
13567
/** Opcode 0xd9 !11/4.
 * FLDENV m14/28byte - load the FPU environment; size depends on operand size,
 * hence the whole operation is deferred to a C implementation. */
FNIEMOP_DEF_1(iemOp_fldenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fldenv m14/28byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pIemCpu->enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fldenv, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
13584
13585
13586/** Opcode 0xd9 !11/5 */
13587FNIEMOP_DEF_1(iemOp_fldcw, uint8_t, bRm)
13588{
13589 IEMOP_MNEMONIC("fldcw m2byte");
13590 IEM_MC_BEGIN(1, 1);
13591 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13592 IEM_MC_ARG(uint16_t, u16Fsw, 0);
13593 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13594 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13595 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13596 IEM_MC_FETCH_MEM_U16(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
13597 IEM_MC_CALL_CIMPL_1(iemCImpl_fldcw, u16Fsw);
13598 IEM_MC_END();
13599 return VINF_SUCCESS;
13600}
13601
13602
13603/** Opcode 0xd9 !11/6 */
13604FNIEMOP_DEF_1(iemOp_fnstenv, uint8_t, bRm)
13605{
13606 IEMOP_MNEMONIC("fstenv m14/m28byte");
13607 IEM_MC_BEGIN(3, 0);
13608 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pIemCpu->enmEffOpSize, 0);
13609 IEM_MC_ARG(uint8_t, iEffSeg, 1);
13610 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
13611 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
13612 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13613 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13614 IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
13615 IEM_MC_CALL_CIMPL_3(iemCImpl_fnstenv, enmEffOpSize, iEffSeg, GCPtrEffDst);
13616 IEM_MC_END();
13617 return VINF_SUCCESS;
13618}
13619
13620
/** Opcode 0xd9 !11/7.
 * FNSTCW m2byte - store the FPU control word to memory (no-wait form). */
FNIEMOP_DEF_1(iemOp_fnstcw, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fnstcw m2byte");
    IEM_MC_BEGIN(2, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fcw);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_FETCH_FCW(u16Fcw);
    IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Fcw);
    IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
    return VINF_SUCCESS;
}
13637
13638
/** Opcode 0xd9 0xd0, 0xd9 0xd8-0xdf, ++?.
 * FNOP - FPU no-operation; still updates FPU opcode/IP tracking. */
FNIEMOP_DEF(iemOp_fnop)
{
    IEMOP_MNEMONIC("fnop");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /** @todo Testcase: looks like FNOP leaves FOP alone but updates FPUIP. Could be
     * intel optimizations. Investigate. */
    IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
    return VINF_SUCCESS;
}
13656
13657
/** Opcode 0xd9 11/0 stN.
 * FLD ST(i) - push a copy of STn onto the FPU register stack. */
FNIEMOP_DEF_1(iemOp_fld_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fld stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     * indicates that it does. */
    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, bRm & X86_MODRM_RM_MASK)
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_PUSH_FPU_RESULT(FpuRes);
    IEM_MC_ELSE()
        /* Source register is empty -> push underflow. */
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW();
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
13683
13684
/** Opcode 0xd9 11/3 stN.
 * FXCH ST(i) - exchange ST0 and STn; C1 is set per the MC below. */
FNIEMOP_DEF_1(iemOp_fxch_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fxch stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     * indicates that it does. */
    IEM_MC_BEGIN(1, 3);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value2);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_CONST(uint8_t, iStReg, /*=*/ bRm & X86_MODRM_RM_MASK, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        /* Swap: STn's old value goes to ST0 (via FpuRes), ST0's to STn. */
        IEM_MC_SET_FPU_RESULT(FpuRes, X86_FSW_C1, pr80Value2);
        IEM_MC_STORE_FPUREG_R80_SRC_REF(bRm & X86_MODRM_RM_MASK, pr80Value1);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        /* One or both registers empty: underflow handling in C code. */
        IEM_MC_CALL_CIMPL_1(iemCImpl_fxch_underflow, iStReg);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
13713
13714
/** Opcode 0xd9 11/4, 0xdd 11/2.
 * FSTP ST(i) - copy ST0 to STn and pop the register stack. */
FNIEMOP_DEF_1(iemOp_fstp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fstp st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* fstp st0, st0 is frequently used as an official 'ffreep st0' sequence. */
    uint8_t const iDstReg = bRm & X86_MODRM_RM_MASK;
    if (!iDstReg)
    {
        /* Destination is ST0 itself: just pop (no data movement needed). */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL_CONST(uint16_t, u16Fsw, /*=*/ 0);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();
        IEM_MC_IF_FPUREG_NOT_EMPTY(0)
            IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
        IEM_MC_ELSE()
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(0);
        IEM_MC_ENDIF();
        IEM_MC_USED_FPU();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
        IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();
        IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
            IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
            IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, iDstReg);
        IEM_MC_ELSE()
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(iDstReg);
        IEM_MC_ENDIF();
        IEM_MC_USED_FPU();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
13757
13758
13759/**
13760 * Common worker for FPU instructions working on ST0 and replaces it with the
13761 * result, i.e. unary operators.
13762 *
13763 * @param pfnAImpl Pointer to the instruction implementation (assembly).
13764 */
13765FNIEMOP_DEF_1(iemOpHlpFpu_st0, PFNIEMAIMPLFPUR80UNARY, pfnAImpl)
13766{
13767 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13768
13769 IEM_MC_BEGIN(2, 1);
13770 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
13771 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
13772 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
13773
13774 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13775 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13776 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
13777 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuRes, pr80Value);
13778 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
13779 IEM_MC_ELSE()
13780 IEM_MC_FPU_STACK_UNDERFLOW(0);
13781 IEM_MC_ENDIF();
13782 IEM_MC_USED_FPU();
13783 IEM_MC_ADVANCE_RIP();
13784
13785 IEM_MC_END();
13786 return VINF_SUCCESS;
13787}
13788
13789
/** Opcode 0xd9 0xe0. FCHS - negate the sign of ST0. */
FNIEMOP_DEF(iemOp_fchs)
{
    IEMOP_MNEMONIC("fchs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fchs_r80);
}
13796
13797
/** Opcode 0xd9 0xe1. FABS - clear the sign of ST0. */
FNIEMOP_DEF(iemOp_fabs)
{
    IEMOP_MNEMONIC("fabs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fabs_r80);
}
13804
13805
13806/**
13807 * Common worker for FPU instructions working on ST0 and only returns FSW.
13808 *
13809 * @param pfnAImpl Pointer to the instruction implementation (assembly).
13810 */
13811FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0, PFNIEMAIMPLFPUR80UNARYFSW, pfnAImpl)
13812{
13813 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13814
13815 IEM_MC_BEGIN(2, 1);
13816 IEM_MC_LOCAL(uint16_t, u16Fsw);
13817 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
13818 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
13819
13820 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13821 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13822 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
13823 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pu16Fsw, pr80Value);
13824 IEM_MC_UPDATE_FSW(u16Fsw);
13825 IEM_MC_ELSE()
13826 IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
13827 IEM_MC_ENDIF();
13828 IEM_MC_USED_FPU();
13829 IEM_MC_ADVANCE_RIP();
13830
13831 IEM_MC_END();
13832 return VINF_SUCCESS;
13833}
13834
13835
/** Opcode 0xd9 0xe4. FTST - compare ST0 against +0.0, setting C0-C3. */
FNIEMOP_DEF(iemOp_ftst)
{
    IEMOP_MNEMONIC("ftst st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_ftst_r80);
}
13842
13843
/** Opcode 0xd9 0xe5. FXAM - classify the value in ST0 via C0-C3. */
FNIEMOP_DEF(iemOp_fxam)
{
    IEMOP_MNEMONIC("fxam st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_fxam_r80);
}
13850
13851
13852/**
13853 * Common worker for FPU instructions pushing a constant onto the FPU stack.
13854 *
13855 * @param pfnAImpl Pointer to the instruction implementation (assembly).
13856 */
13857FNIEMOP_DEF_1(iemOpHlpFpuPushConstant, PFNIEMAIMPLFPUR80LDCONST, pfnAImpl)
13858{
13859 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13860
13861 IEM_MC_BEGIN(1, 1);
13862 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
13863 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
13864
13865 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13866 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13867 IEM_MC_IF_FPUREG_IS_EMPTY(7)
13868 IEM_MC_CALL_FPU_AIMPL_1(pfnAImpl, pFpuRes);
13869 IEM_MC_PUSH_FPU_RESULT(FpuRes);
13870 IEM_MC_ELSE()
13871 IEM_MC_FPU_STACK_PUSH_OVERFLOW();
13872 IEM_MC_ENDIF();
13873 IEM_MC_USED_FPU();
13874 IEM_MC_ADVANCE_RIP();
13875
13876 IEM_MC_END();
13877 return VINF_SUCCESS;
13878}
13879
13880
/** Opcode 0xd9 0xe8 - FLD1: push +1.0. */
FNIEMOP_DEF(iemOp_fld1)
{
    IEMOP_MNEMONIC("fld1");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fld1);
}
13887
13888
/** Opcode 0xd9 0xe9 - FLDL2T: push log2(10). */
FNIEMOP_DEF(iemOp_fldl2t)
{
    IEMOP_MNEMONIC("fldl2t");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2t);
}
13895
13896
/** Opcode 0xd9 0xea - FLDL2E: push log2(e). */
FNIEMOP_DEF(iemOp_fldl2e)
{
    IEMOP_MNEMONIC("fldl2e");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2e);
}
13903
/** Opcode 0xd9 0xeb - FLDPI: push pi. */
FNIEMOP_DEF(iemOp_fldpi)
{
    IEMOP_MNEMONIC("fldpi");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldpi);
}
13910
13911
/** Opcode 0xd9 0xec - FLDLG2: push log10(2). */
FNIEMOP_DEF(iemOp_fldlg2)
{
    IEMOP_MNEMONIC("fldlg2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldlg2);
}
13918
/** Opcode 0xd9 0xed - FLDLN2: push ln(2). */
FNIEMOP_DEF(iemOp_fldln2)
{
    IEMOP_MNEMONIC("fldln2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldln2);
}
13925
13926
/** Opcode 0xd9 0xee - FLDZ: push +0.0. */
FNIEMOP_DEF(iemOp_fldz)
{
    IEMOP_MNEMONIC("fldz");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldz);
}
13933
13934
/** Opcode 0xd9 0xf0 - F2XM1: ST0 = 2^ST0 - 1. */
FNIEMOP_DEF(iemOp_f2xm1)
{
    IEMOP_MNEMONIC("f2xm1 st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_f2xm1_r80);
}
13941
13942
13943/** Opcode 0xd9 0xf1. */
13944FNIEMOP_DEF(iemOp_fylx2)
13945{
13946 IEMOP_MNEMONIC("fylx2 st0");
13947 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fyl2x_r80);
13948}
13949
13950
13951/**
13952 * Common worker for FPU instructions working on ST0 and having two outputs, one
13953 * replacing ST0 and one pushed onto the stack.
13954 *
13955 * @param pfnAImpl Pointer to the instruction implementation (assembly).
13956 */
13957FNIEMOP_DEF_1(iemOpHlpFpuReplace_st0_push, PFNIEMAIMPLFPUR80UNARYTWO, pfnAImpl)
13958{
13959 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13960
13961 IEM_MC_BEGIN(2, 1);
13962 IEM_MC_LOCAL(IEMFPURESULTTWO, FpuResTwo);
13963 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULTTWO, pFpuResTwo, FpuResTwo, 0);
13964 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
13965
13966 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13967 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13968 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
13969 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuResTwo, pr80Value);
13970 IEM_MC_PUSH_FPU_RESULT_TWO(FpuResTwo);
13971 IEM_MC_ELSE()
13972 IEM_MC_FPU_STACK_PUSH_UNDERFLOW_TWO();
13973 IEM_MC_ENDIF();
13974 IEM_MC_USED_FPU();
13975 IEM_MC_ADVANCE_RIP();
13976
13977 IEM_MC_END();
13978 return VINF_SUCCESS;
13979}
13980
13981
/** Opcode 0xd9 0xf2 - FPTAN: ST0 = tan(ST0), then push 1.0. */
FNIEMOP_DEF(iemOp_fptan)
{
    IEMOP_MNEMONIC("fptan st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fptan_r80_r80);
}
13988
13989
13990/**
13991 * Common worker for FPU instructions working on STn and ST0, storing the result
13992 * in STn, and popping the stack unless IE, DE or ZE was raised.
13993 *
13994 * @param pfnAImpl Pointer to the instruction implementation (assembly).
13995 */
13996FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
13997{
13998 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13999
14000 IEM_MC_BEGIN(3, 1);
14001 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14002 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
14003 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14004 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
14005
14006 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14007 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14008
14009 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
14010 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
14011 IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, bRm & X86_MODRM_RM_MASK);
14012 IEM_MC_ELSE()
14013 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(bRm & X86_MODRM_RM_MASK);
14014 IEM_MC_ENDIF();
14015 IEM_MC_USED_FPU();
14016 IEM_MC_ADVANCE_RIP();
14017
14018 IEM_MC_END();
14019 return VINF_SUCCESS;
14020}
14021
14022
/** Opcode 0xd9 0xf3 - FPATAN: ST1 = atan(ST1/ST0), then pop. */
FNIEMOP_DEF(iemOp_fpatan)
{
    IEMOP_MNEMONIC("fpatan st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fpatan_r80_by_r80);
}
14029
14030
/** Opcode 0xd9 0xf4 - FXTRACT: split ST0 into exponent and significand. */
FNIEMOP_DEF(iemOp_fxtract)
{
    IEMOP_MNEMONIC("fxtract st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fxtract_r80_r80);
}
14037
14038
/** Opcode 0xd9 0xf5 - FPREM1: IEEE partial remainder of ST0 / ST1. */
FNIEMOP_DEF(iemOp_fprem1)
{
    IEMOP_MNEMONIC("fprem1 st0, st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem1_r80_by_r80);
}
14045
14046
/** Opcode 0xd9 0xf6 - FDECSTP: decrement the FPU top-of-stack pointer. */
FNIEMOP_DEF(iemOp_fdecstp)
{
    IEMOP_MNEMONIC("fdecstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Rotate TOP; no tag bits or register contents are modified. */
    IEM_MC_FPU_STACK_DEC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0);

    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
14069
14070
/** Opcode 0xd9 0xf7 - FINCSTP: increment the FPU top-of-stack pointer. */
FNIEMOP_DEF(iemOp_fincstp)
{
    IEMOP_MNEMONIC("fincstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Rotate TOP; no tag bits or register contents are modified. */
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0);

    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
14093
14094
/** Opcode 0xd9 0xf8 - FPREM: partial remainder (truncating) of ST0 / ST1. */
FNIEMOP_DEF(iemOp_fprem)
{
    IEMOP_MNEMONIC("fprem st0, st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem_r80_by_r80);
}
14101
14102
/** Opcode 0xd9 0xf9 - FYL2XP1: ST1 = ST1 * log2(ST0 + 1.0), then pop. */
FNIEMOP_DEF(iemOp_fyl2xp1)
{
    IEMOP_MNEMONIC("fyl2xp1 st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2xp1_r80_by_r80);
}
14109
14110
/** Opcode 0xd9 0xfa - FSQRT: ST0 = sqrt(ST0). */
FNIEMOP_DEF(iemOp_fsqrt)
{
    IEMOP_MNEMONIC("fsqrt st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsqrt_r80);
}
14117
14118
/** Opcode 0xd9 0xfb - FSINCOS: ST0 = sin(ST0), then push cos of old ST0. */
FNIEMOP_DEF(iemOp_fsincos)
{
    IEMOP_MNEMONIC("fsincos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fsincos_r80_r80);
}
14125
14126
/** Opcode 0xd9 0xfc - FRNDINT: round ST0 to integer per FCW rounding mode. */
FNIEMOP_DEF(iemOp_frndint)
{
    IEMOP_MNEMONIC("frndint st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_frndint_r80);
}
14133
14134
/** Opcode 0xd9 0xfd - FSCALE: ST0 = ST0 * 2^trunc(ST1). */
FNIEMOP_DEF(iemOp_fscale)
{
    IEMOP_MNEMONIC("fscale st0, st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fscale_r80_by_r80);
}
14141
14142
/** Opcode 0xd9 0xfe - FSIN: ST0 = sin(ST0). */
FNIEMOP_DEF(iemOp_fsin)
{
    IEMOP_MNEMONIC("fsin st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsin_r80);
}
14149
14150
/** Opcode 0xd9 0xff - FCOS: ST0 = cos(ST0). */
FNIEMOP_DEF(iemOp_fcos)
{
    IEMOP_MNEMONIC("fcos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fcos_r80);
}
14157
14158
/** Used by iemOp_EscF1 for the register-form /4../7 encodings; indexed by
 *  (bRm - 0xe0), covering opcode bytes 0xe0 through 0xff. */
static const PFNIEMOP g_apfnEscF1_E0toFF[32] =
{
    /* 0xe0 */ iemOp_fchs,
    /* 0xe1 */ iemOp_fabs,
    /* 0xe2 */ iemOp_Invalid,
    /* 0xe3 */ iemOp_Invalid,
    /* 0xe4 */ iemOp_ftst,
    /* 0xe5 */ iemOp_fxam,
    /* 0xe6 */ iemOp_Invalid,
    /* 0xe7 */ iemOp_Invalid,
    /* 0xe8 */ iemOp_fld1,
    /* 0xe9 */ iemOp_fldl2t,
    /* 0xea */ iemOp_fldl2e,
    /* 0xeb */ iemOp_fldpi,
    /* 0xec */ iemOp_fldlg2,
    /* 0xed */ iemOp_fldln2,
    /* 0xee */ iemOp_fldz,
    /* 0xef */ iemOp_Invalid,
    /* 0xf0 */ iemOp_f2xm1,
    /* 0xf1 */ iemOp_fylx2,
    /* 0xf2 */ iemOp_fptan,
    /* 0xf3 */ iemOp_fpatan,
    /* 0xf4 */ iemOp_fxtract,
    /* 0xf5 */ iemOp_fprem1,
    /* 0xf6 */ iemOp_fdecstp,
    /* 0xf7 */ iemOp_fincstp,
    /* 0xf8 */ iemOp_fprem,
    /* 0xf9 */ iemOp_fyl2xp1,
    /* 0xfa */ iemOp_fsqrt,
    /* 0xfb */ iemOp_fsincos,
    /* 0xfc */ iemOp_frndint,
    /* 0xfd */ iemOp_fscale,
    /* 0xfe */ iemOp_fsin,
    /* 0xff */ iemOp_fcos
};
14195
14196
/** Opcode 0xd9 - escape group F1: dispatches on the ModR/M byte between
 *  register-form (mod == 3) and memory-form x87 instructions. */
FNIEMOP_DEF(iemOp_EscF1)
{
    /* Remember the opcode position for FOP reporting (FPU opcode register). */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register form: dispatch on reg field, /4../7 via table. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm);
            case 2:
                if (bRm == 0xd0)
                    return FNIEMOP_CALL(iemOp_fnop);
                return IEMOP_RAISE_INVALID_OPCODE();
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved. Intel behavior seems to be FSTP ST(i) though. */
            case 4:
            case 5:
            case 6:
            case 7:
                Assert((unsigned)bRm - 0xe0U < RT_ELEMENTS(g_apfnEscF1_E0toFF));
                return FNIEMOP_CALL(g_apfnEscF1_E0toFF[bRm - 0xe0]);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory form: dispatch on reg field. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m32r, bRm);
            case 1: return IEMOP_RAISE_INVALID_OPCODE();
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m32r, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fldenv, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fldcw, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fnstenv, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstcw, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
14238
14239
/** Opcode 0xda 11/0 - FCMOVB: copy STn to ST0 when CF is set (below). */
FNIEMOP_DEF_1(iemOp_fcmovb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Both STn (source) and ST0 (destination) must be valid. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        /* FPU pointers are updated even if the move condition is false. */
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14266
14267
/** Opcode 0xda 11/1 - FCMOVE: copy STn to ST0 when ZF is set (equal). */
FNIEMOP_DEF_1(iemOp_fcmove_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmove st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        /* FPU pointers are updated even if the move condition is false. */
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14294
14295
/** Opcode 0xda 11/2 - FCMOVBE: copy STn to ST0 when CF or ZF is set. */
FNIEMOP_DEF_1(iemOp_fcmovbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        /* FPU pointers are updated even if the move condition is false. */
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14322
14323
/** Opcode 0xda 11/3 - FCMOVU: copy STn to ST0 when PF is set (unordered). */
FNIEMOP_DEF_1(iemOp_fcmovu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovu st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        /* FPU pointers are updated even if the move condition is false. */
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14350
14351
14352/**
14353 * Common worker for FPU instructions working on ST0 and STn, only affecting
14354 * flags, and popping twice when done.
14355 *
14356 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14357 */
14358FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
14359{
14360 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14361
14362 IEM_MC_BEGIN(3, 1);
14363 IEM_MC_LOCAL(uint16_t, u16Fsw);
14364 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14365 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14366 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
14367
14368 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14369 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14370 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, 1)
14371 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
14372 IEM_MC_UPDATE_FSW_THEN_POP_POP(u16Fsw);
14373 IEM_MC_ELSE()
14374 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP_POP();
14375 IEM_MC_ENDIF();
14376 IEM_MC_USED_FPU();
14377 IEM_MC_ADVANCE_RIP();
14378
14379 IEM_MC_END();
14380 return VINF_SUCCESS;
14381}
14382
14383
/** Opcode 0xda 0xe9 - FUCOMPP: unordered compare ST0 with ST1, pop twice. */
FNIEMOP_DEF(iemOp_fucompp)
{
    IEMOP_MNEMONIC("fucompp st0,stN");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fucom_r80_by_r80);
}
14390
14391
14392/**
14393 * Common worker for FPU instructions working on ST0 and an m32i, and storing
14394 * the result in ST0.
14395 *
14396 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14397 */
14398FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32i, uint8_t, bRm, PFNIEMAIMPLFPUI32, pfnAImpl)
14399{
14400 IEM_MC_BEGIN(3, 3);
14401 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14402 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14403 IEM_MC_LOCAL(int32_t, i32Val2);
14404 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
14405 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14406 IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);
14407
14408 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14409 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14410
14411 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14412 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14413 IEM_MC_FETCH_MEM_I32(i32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);
14414
14415 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
14416 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi32Val2);
14417 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
14418 IEM_MC_ELSE()
14419 IEM_MC_FPU_STACK_UNDERFLOW(0);
14420 IEM_MC_ENDIF();
14421 IEM_MC_USED_FPU();
14422 IEM_MC_ADVANCE_RIP();
14423
14424 IEM_MC_END();
14425 return VINF_SUCCESS;
14426}
14427
14428
/** Opcode 0xda !11/0 - FIADD: ST0 = ST0 + m32i. */
FNIEMOP_DEF_1(iemOp_fiadd_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fiadd m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fiadd_r80_by_i32);
}
14435
14436
/** Opcode 0xda !11/1 - FIMUL: ST0 = ST0 * m32i. */
FNIEMOP_DEF_1(iemOp_fimul_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fimul m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fimul_r80_by_i32);
}
14443
14444
/** Opcode 0xda !11/2 - FICOM: compare ST0 with m32i, FSW flags only. */
FNIEMOP_DEF_1(iemOp_ficom_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ficom st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        /* Records FDP/FDS alongside the FSW update (memory operand form). */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14477
14478
/** Opcode 0xda !11/3 - FICOMP: compare ST0 with m32i, then pop. */
FNIEMOP_DEF_1(iemOp_ficomp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ficomp st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* Same comparison worker as FICOM; the difference is the pop on exit. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14511
14512
/** Opcode 0xda !11/4 - FISUB: ST0 = ST0 - m32i. */
FNIEMOP_DEF_1(iemOp_fisub_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisub m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisub_r80_by_i32);
}
14519
14520
/** Opcode 0xda !11/5 - FISUBR: ST0 = m32i - ST0 (reversed subtract). */
FNIEMOP_DEF_1(iemOp_fisubr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisubr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisubr_r80_by_i32);
}
14527
14528
/** Opcode 0xda !11/6 - FIDIV: ST0 = ST0 / m32i. */
FNIEMOP_DEF_1(iemOp_fidiv_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fidiv m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidiv_r80_by_i32);
}
14535
14536
/** Opcode 0xda !11/7 - FIDIVR: ST0 = m32i / ST0 (reversed divide). */
FNIEMOP_DEF_1(iemOp_fidivr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fidivr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidivr_r80_by_i32);
}
14543
14544
/** Opcode 0xda - escape group F2: FCMOVcc (register forms) and 32-bit
 *  integer arithmetic/compare (memory forms). */
FNIEMOP_DEF(iemOp_EscF2)
{
    /* Remember the opcode position for FOP reporting (FPU opcode register). */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovb_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmove_stN, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovu_stN, bRm);
            case 4: return IEMOP_RAISE_INVALID_OPCODE();
            case 5:
                /* /5 is only valid as FUCOMPP (0xda 0xe9). */
                if (bRm == 0xe9)
                    return FNIEMOP_CALL(iemOp_fucompp);
                return IEMOP_RAISE_INVALID_OPCODE();
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m32i,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m32i,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m32i,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m32i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m32i,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m32i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m32i,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m32i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
14584
14585
/** Opcode 0xdb !11/0 - FILD: push m32i converted to an 80-bit real. */
FNIEMOP_DEF_1(iemOp_fild_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fild m32i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int32_t, i32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val, i32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* Register 7 is the one a push will land in. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i32_to_r80, pFpuRes, pi32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14617
14618
/** Opcode 0xdb !11/1 - FISTTP: store ST0 to m32i with truncation, pop. */
FNIEMOP_DEF_1(iemOp_fisttp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisttp m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination writable up front so any #PF precedes FPU work. */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST0: store integer indefinite if #IE is masked, then underflow. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14653
14654
/** Opcode 0xdb !11/2 - FIST: store ST0 to m32i per FCW rounding, no pop. */
FNIEMOP_DEF_1(iemOp_fist_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fist m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination writable up front so any #PF precedes FPU work. */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST0: store integer indefinite if #IE is masked, then underflow. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14689
14690
14691/** Opcode 0xdb !11/3. */
14692FNIEMOP_DEF_1(iemOp_fistp_m32i, uint8_t, bRm)
14693{
14694 IEMOP_MNEMONIC("fisttp m32i");
14695 IEM_MC_BEGIN(3, 2);
14696 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14697 IEM_MC_LOCAL(uint16_t, u16Fsw);
14698 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14699 IEM_MC_ARG(int32_t *, pi32Dst, 1);
14700 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
14701
14702 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
14703 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14704 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14705 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14706
14707 IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
14708 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
14709 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
14710 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
14711 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
14712 IEM_MC_ELSE()
14713 IEM_MC_IF_FCW_IM()
14714 IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
14715 IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
14716 IEM_MC_ENDIF();
14717 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
14718 IEM_MC_ENDIF();
14719 IEM_MC_USED_FPU();
14720 IEM_MC_ADVANCE_RIP();
14721
14722 IEM_MC_END();
14723 return VINF_SUCCESS;
14724}
14725
14726
/** Opcode 0xdb !11/5 - FLD: push an 80-bit real from memory. */
FNIEMOP_DEF_1(iemOp_fld_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fld m80r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT80U, r80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT80U, pr80Val, r80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R80(r80Val, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* Register 7 is the one a push will land in. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r80, pFpuRes, pr80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14758
14759
/** Opcode 0xdb !11/7 - FSTP: store ST0 to an 80-bit real in memory, pop. */
FNIEMOP_DEF_1(iemOp_fstp_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fstp m80r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT80U, pr80Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination writable up front so any #PF precedes FPU work. */
    IEM_MC_MEM_MAP(pr80Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r80, pu16Fsw, pr80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr80Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST0: store QNaN indefinite if #IE is masked, then underflow. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(pr80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr80Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14794
14795
/** Opcode 0xdb 11/0 - FCMOVNB: copy STn to ST0 when CF is clear (not below). */
FNIEMOP_DEF_1(iemOp_fcmovnb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovnb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        /* FPU pointers are updated even if the move condition is false. */
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14822
14823
/** Opcode 0xdb 11/1 - FCMOVNE: copy STn to ST0 when ZF is clear (not equal). */
FNIEMOP_DEF_1(iemOp_fcmovne_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovne st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        /* FPU pointers are updated even if the move condition is false. */
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14850
14851
/** Opcode 0xdb 11/2 - FCMOVNBE: copy STn to ST0 when CF and ZF are both clear. */
FNIEMOP_DEF_1(iemOp_fcmovnbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovnbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        /* FPU pointers are updated even if the move condition is false. */
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14878
14879
14880/** Opcode 0xdb 11/3. */
14881FNIEMOP_DEF_1(iemOp_fcmovnnu_stN, uint8_t, bRm)
14882{
14883 IEMOP_MNEMONIC("fcmovnnu st0,stN");
14884 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14885
14886 IEM_MC_BEGIN(0, 1);
14887 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
14888
14889 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14890 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14891
14892 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
14893 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF)
14894 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
14895 IEM_MC_ENDIF();
14896 IEM_MC_UPDATE_FPU_OPCODE_IP();
14897 IEM_MC_ELSE()
14898 IEM_MC_FPU_STACK_UNDERFLOW(0);
14899 IEM_MC_ENDIF();
14900 IEM_MC_USED_FPU();
14901 IEM_MC_ADVANCE_RIP();
14902
14903 IEM_MC_END();
14904 return VINF_SUCCESS;
14905}
14906
14907
/** Opcode 0xdb 0xe0. */
FNIEMOP_DEF(iemOp_fneni)
{
    /* FNENI: 8087-only interrupt-enable instruction; treated as a no-op here. */
    IEMOP_MNEMONIC("fneni (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
14919
14920
/** Opcode 0xdb 0xe1. */
FNIEMOP_DEF(iemOp_fndisi)
{
    /* FNDISI: 8087-only interrupt-disable instruction; treated as a no-op here. */
    IEMOP_MNEMONIC("fndisi (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
14932
14933
/** Opcode 0xdb 0xe2. */
FNIEMOP_DEF(iemOp_fnclex)
{
    /* FNCLEX: clear the FPU exception flags (FSW) without checking for pending exceptions. */
    IEMOP_MNEMONIC("fnclex");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_CLEAR_FSW_EX();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
14947
14948
/** Opcode 0xdb 0xe3. */
FNIEMOP_DEF(iemOp_fninit)
{
    /* FNINIT: reinitialize the FPU without checking for pending exceptions
       (fCheckXcpts=false); deferred to the C implementation. */
    IEMOP_MNEMONIC("fninit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_finit, false /*fCheckXcpts*/);
}
14956
14957
/** Opcode 0xdb 0xe4. */
FNIEMOP_DEF(iemOp_fnsetpm)
{
    IEMOP_MNEMONIC("fnsetpm (80287/ign)"); /* set protected mode on fpu. */
    /* 80287-only; later CPUs ignore it, so emulate as a no-op. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
14969
14970
/** Opcode 0xdb 0xe5. */
FNIEMOP_DEF(iemOp_frstpm)
{
    IEMOP_MNEMONIC("frstpm (80287XL/ign)"); /* reset pm, back to real mode. */
#if 0 /* #UDs on newer CPUs */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
#else
    /* Only the 80287XL honoured this; modern CPUs raise #UD, which we emulate. */
    return IEMOP_RAISE_INVALID_OPCODE();
#endif
}
14986
14987
/** Opcode 0xdb 11/5. */
FNIEMOP_DEF_1(iemOp_fucomi_stN, uint8_t, bRm)
{
    /* FUCOMI ST(0),ST(i): unordered compare, setting EFLAGS; no pop. */
    IEMOP_MNEMONIC("fucomi st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fucomi_r80_by_r80, false /*fPop*/);
}
14994
14995
/** Opcode 0xdb 11/6. */
FNIEMOP_DEF_1(iemOp_fcomi_stN, uint8_t, bRm)
{
    /* FCOMI ST(0),ST(i): ordered compare, setting EFLAGS; no pop. */
    IEMOP_MNEMONIC("fcomi st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, false /*fPop*/);
}
15002
15003
/** Opcode 0xdb. */
FNIEMOP_DEF(iemOp_EscF3)
{
    /* Record the offset of the escape opcode for the FPU FOP register. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register form, dispatched on the reg field (and on the full byte for /4). */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovnb_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmovne_stN,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovnbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovnnu_stN, bRm);
            case 4:
                /* The /4 group encodes individual control instructions in the low bits. */
                switch (bRm)
                {
                    case 0xe0: return FNIEMOP_CALL(iemOp_fneni);
                    case 0xe1: return FNIEMOP_CALL(iemOp_fndisi);
                    case 0xe2: return FNIEMOP_CALL(iemOp_fnclex);
                    case 0xe3: return FNIEMOP_CALL(iemOp_fninit);
                    case 0xe4: return FNIEMOP_CALL(iemOp_fnsetpm);
                    case 0xe5: return FNIEMOP_CALL(iemOp_frstpm);
                    case 0xe6: return IEMOP_RAISE_INVALID_OPCODE();
                    case 0xe7: return IEMOP_RAISE_INVALID_OPCODE();
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case 5: return FNIEMOP_CALL_1(iemOp_fucomi_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fcomi_stN, bRm);
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory form. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fild_m32i,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m32i,bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fist_m32i,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fistp_m32i, bRm);
            case 4: return IEMOP_RAISE_INVALID_OPCODE();
            case 5: return FNIEMOP_CALL_1(iemOp_fld_m80r,   bRm);
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return FNIEMOP_CALL_1(iemOp_fstp_m80r,  bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
15053
15054
/**
 * Common worker for FPU instructions working on STn and ST0, and storing the
 * result in STn unless IE, DE or ZE was raised.
 *
 * @param   bRm         The ModR/M byte (register form); low bits select ST(i).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,     FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,          1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value2,          2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Both ST(i) and ST(0) must be non-empty; otherwise signal stack underflow on ST(i). */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15086
15087
/** Opcode 0xdc 11/0. */
FNIEMOP_DEF_1(iemOp_fadd_stN_st0, uint8_t, bRm)
{
    /* FADD ST(i),ST(0): add ST(0) to ST(i), result in ST(i). */
    IEMOP_MNEMONIC("fadd stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fadd_r80_by_r80);
}
15094
15095
/** Opcode 0xdc 11/1. */
FNIEMOP_DEF_1(iemOp_fmul_stN_st0, uint8_t, bRm)
{
    /* FMUL ST(i),ST(0): multiply ST(i) by ST(0), result in ST(i). */
    IEMOP_MNEMONIC("fmul stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fmul_r80_by_r80);
}
15102
15103
/** Opcode 0xdc 11/4. */
FNIEMOP_DEF_1(iemOp_fsubr_stN_st0, uint8_t, bRm)
{
    /* FSUBR ST(i),ST(0): reversed subtract, result in ST(i). */
    IEMOP_MNEMONIC("fsubr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsubr_r80_by_r80);
}
15110
15111
/** Opcode 0xdc 11/5. */
FNIEMOP_DEF_1(iemOp_fsub_stN_st0, uint8_t, bRm)
{
    /* FSUB ST(i),ST(0): subtract ST(0) from ST(i), result in ST(i). */
    IEMOP_MNEMONIC("fsub stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsub_r80_by_r80);
}
15118
15119
/** Opcode 0xdc 11/6. */
FNIEMOP_DEF_1(iemOp_fdivr_stN_st0, uint8_t, bRm)
{
    /* FDIVR ST(i),ST(0): reversed divide, result in ST(i). */
    IEMOP_MNEMONIC("fdivr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdivr_r80_by_r80);
}
15126
15127
/** Opcode 0xdc 11/7. */
FNIEMOP_DEF_1(iemOp_fdiv_stN_st0, uint8_t, bRm)
{
    /* FDIV ST(i),ST(0): divide ST(i) by ST(0), result in ST(i). */
    IEMOP_MNEMONIC("fdiv stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdiv_r80_by_r80);
}
15134
15135
15136/**
15137 * Common worker for FPU instructions working on ST0 and a 64-bit floating point
15138 * memory operand, and storing the result in ST0.
15139 *
15140 * @param pfnAImpl Pointer to the instruction implementation (assembly).
15141 */
15142FNIEMOP_DEF_2(iemOpHlpFpu_ST0_m64r, uint8_t, bRm, PFNIEMAIMPLFPUR64, pfnImpl)
15143{
15144 IEM_MC_BEGIN(3, 3);
15145 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15146 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
15147 IEM_MC_LOCAL(RTFLOAT64U, r64Factor2);
15148 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
15149 IEM_MC_ARG(PCRTFLOAT80U, pr80Factor1, 1);
15150 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Factor2, r64Factor2, 2);
15151
15152 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15153 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15154 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15155 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15156
15157 IEM_MC_FETCH_MEM_R64(r64Factor2, pIemCpu->iEffSeg, GCPtrEffSrc);
15158 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Factor1, 0)
15159 IEM_MC_CALL_FPU_AIMPL_3(pfnImpl, pFpuRes, pr80Factor1, pr64Factor2);
15160 IEM_MC_STORE_FPU_RESULT_MEM_OP(FpuRes, 0, pIemCpu->iEffSeg, GCPtrEffSrc);
15161 IEM_MC_ELSE()
15162 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(0, pIemCpu->iEffSeg, GCPtrEffSrc);
15163 IEM_MC_ENDIF();
15164 IEM_MC_USED_FPU();
15165 IEM_MC_ADVANCE_RIP();
15166
15167 IEM_MC_END();
15168 return VINF_SUCCESS;
15169}
15170
15171
/** Opcode 0xdc !11/0. */
FNIEMOP_DEF_1(iemOp_fadd_m64r, uint8_t, bRm)
{
    /* FADD m64fp: add the 64-bit memory operand to ST(0). */
    IEMOP_MNEMONIC("fadd m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fadd_r80_by_r64);
}
15178
15179
/** Opcode 0xdc !11/1. */
FNIEMOP_DEF_1(iemOp_fmul_m64r, uint8_t, bRm)
{
    /* FMUL m64fp: multiply ST(0) by the 64-bit memory operand. */
    IEMOP_MNEMONIC("fmul m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fmul_r80_by_r64);
}
15186
15187
/** Opcode 0xdc !11/2. */
FNIEMOP_DEF_1(iemOp_fcom_m64r, uint8_t, bRm)
{
    /* FCOM m64fp: compare ST(0) with the 64-bit memory operand; only FSW is updated. */
    IEMOP_MNEMONIC("fcom st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U,            r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,  0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,          1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U,  pr64Val2,   r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* Compare unless ST(0) is empty, in which case signal stack underflow (no pop). */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15220
15221
/** Opcode 0xdc !11/3. */
FNIEMOP_DEF_1(iemOp_fcomp_m64r, uint8_t, bRm)
{
    /* FCOMP m64fp: like FCOM m64fp, but pops the register stack afterwards. */
    IEMOP_MNEMONIC("fcomp st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U,            r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,  0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,          1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U,  pr64Val2,   r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* Compare then pop; an empty ST(0) signals stack underflow (still pops). */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15254
15255
/** Opcode 0xdc !11/4. */
FNIEMOP_DEF_1(iemOp_fsub_m64r, uint8_t, bRm)
{
    /* FSUB m64fp: subtract the 64-bit memory operand from ST(0). */
    IEMOP_MNEMONIC("fsub m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsub_r80_by_r64);
}
15262
15263
/** Opcode 0xdc !11/5. */
FNIEMOP_DEF_1(iemOp_fsubr_m64r, uint8_t, bRm)
{
    /* FSUBR m64fp: reversed subtract, result in ST(0). */
    IEMOP_MNEMONIC("fsubr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsubr_r80_by_r64);
}
15270
15271
/** Opcode 0xdc !11/6. */
FNIEMOP_DEF_1(iemOp_fdiv_m64r, uint8_t, bRm)
{
    /* FDIV m64fp: divide ST(0) by the 64-bit memory operand. */
    IEMOP_MNEMONIC("fdiv m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdiv_r80_by_r64);
}
15278
15279
/** Opcode 0xdc !11/7. */
FNIEMOP_DEF_1(iemOp_fdivr_m64r, uint8_t, bRm)
{
    /* FDIVR m64fp: reversed divide, result in ST(0). */
    IEMOP_MNEMONIC("fdivr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdivr_r80_by_r64);
}
15286
15287
/** Opcode 0xdc. */
FNIEMOP_DEF(iemOp_EscF4)
{
    /* Record the offset of the escape opcode for the FPU FOP register. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register form: ST(i) destinations. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN_st0,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN_st0,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN,      bRm); /* Marked reserved, intel behavior is that of FCOM ST(i). */
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN,     bRm); /* Marked reserved, intel behavior is that of FCOMP ST(i). */
            case 4: return FNIEMOP_CALL_1(iemOp_fsubr_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsub_stN_st0,  bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivr_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdiv_stN_st0,  bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory form: m64fp operands. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m64r,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m64r,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m64r,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m64r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m64r,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m64r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m64r,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m64r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
15324
15325
/** Opcode 0xdd !11/0.
 * @sa iemOp_fld_m32r */
FNIEMOP_DEF_1(iemOp_fld_m64r, uint8_t, bRm)
{
    /* FLD m64fp: convert the 64-bit memory operand to 80-bit and push it. */
    IEMOP_MNEMONIC("fld m64r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U,            r64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U,  pr64Val,    r64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* ST(7) must be free for the push; otherwise signal stack (push) overflow. */
    IEM_MC_FETCH_MEM_R64(r64Val, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r64_to_r80, pFpuRes, pr64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15357
15358
/** Opcode 0xdd !11/1. */
FNIEMOP_DEF_1(iemOp_fisttp_m64i, uint8_t, bRm)
{
    /* FISTTP m64i: store ST(0) as a 64-bit integer with truncation, then pop. */
    IEMOP_MNEMONIC("fisttp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int64_t *,               pi64Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST(0): with IM masked, store the integer-indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15393
15394
/** Opcode 0xdd !11/2. */
FNIEMOP_DEF_1(iemOp_fst_m64r, uint8_t, bRm)
{
    /* FST m64fp: store ST(0) to memory as a 64-bit float; no pop. */
    IEMOP_MNEMONIC("fst m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U,             pr64Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST(0): with IM masked, store the QNaN (real indefinite) value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15429
15430
15431
15432
/** Opcode 0xdd !11/3. */
FNIEMOP_DEF_1(iemOp_fstp_m64r, uint8_t, bRm)
{
    /* FSTP m64fp: like FST m64fp, but pops the register stack afterwards. */
    IEMOP_MNEMONIC("fstp m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U,             pr64Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST(0): with IM masked, store the QNaN (real indefinite) value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15467
15468
/** Opcode 0xdd !11/4. */
FNIEMOP_DEF_1(iemOp_frstor, uint8_t, bRm)
{
    /* FRSTOR m94/108byte: restore the full FPU state; deferred to the C implementation. */
    IEMOP_MNEMONIC("frstor m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE,           enmEffOpSize, /*=*/ pIemCpu->enmEffOpSize,  0);
    IEM_MC_ARG(uint8_t,                 iEffSeg,                                    1);
    IEM_MC_ARG(RTGCPTR,                 GCPtrEffSrc,                                2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_frstor, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
15485
15486
/** Opcode 0xdd !11/6. */
FNIEMOP_DEF_1(iemOp_fnsave, uint8_t, bRm)
{
    /* FNSAVE m94/108byte: save the full FPU state and reinitialize the FPU;
       deferred to the C implementation. */
    IEMOP_MNEMONIC("fnsave m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE,           enmEffOpSize, /*=*/ pIemCpu->enmEffOpSize,  0);
    IEM_MC_ARG(uint8_t,                 iEffSeg,                                    1);
    IEM_MC_ARG(RTGCPTR,                 GCPtrEffDst,                                2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fnsave, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;

}
15504
/** Opcode 0xdd !11/7. */
FNIEMOP_DEF_1(iemOp_fnstsw, uint8_t, bRm)
{
    /* FNSTSW m16: store the FPU status word to memory; no exception check first. */
    IEMOP_MNEMONIC("fnstsw m16");

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();

    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Tmp);
    IEM_MC_ADVANCE_RIP();

/** @todo Debug / drop a hint to the verifier that things may differ
 * from REM. Seen 0x4020 (iem) vs 0x4000 (rem) at 0008:801c6b88 booting
 * NT4SP1. (X86_FSW_PE) */
    IEM_MC_END();
    return VINF_SUCCESS;
}
15528
15529
/** Opcode 0xdd 11/0. */
FNIEMOP_DEF_1(iemOp_ffree_stN, uint8_t, bRm)
{
    /* FFREE ST(i): mark the register as empty in the tag word. */
    IEMOP_MNEMONIC("ffree stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C1, C2 and C3 are documented as undefined, we leave them
             unmodified. */

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
15551
15552
/** Opcode 0xdd 11/2. */
FNIEMOP_DEF_1(iemOp_fst_stN, uint8_t, bRm)
{
    /* FST ST(i): copy ST(0) into ST(i); no pop. */
    IEMOP_MNEMONIC("fst st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* An empty ST(0) signals stack underflow on the destination register. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
15575
15576
15577/** Opcode 0xdd 11/3. */
15578FNIEMOP_DEF_1(iemOp_fucom_stN_st0, uint8_t, bRm)
15579{
15580 IEMOP_MNEMONIC("fcom st0,stN");
15581 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fucom_r80_by_r80);
15582}
15583
15584
15585/** Opcode 0xdd 11/4. */
15586FNIEMOP_DEF_1(iemOp_fucomp_stN, uint8_t, bRm)
15587{
15588 IEMOP_MNEMONIC("fcomp st0,stN");
15589 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fucom_r80_by_r80);
15590}
15591
15592
/** Opcode 0xdd. */
FNIEMOP_DEF(iemOp_EscF5)
{
    /* Record the offset of the escape opcode for the FPU FOP register. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register form. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffree_stN,   bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN,    bRm); /* Reserved, intel behavior is that of XCHG ST(i). */
            case 2: return FNIEMOP_CALL_1(iemOp_fst_stN,     bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN,    bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fucom_stN_st0,bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fucomp_stN,  bRm);
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory form. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m64r,    bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m64i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m64r,    bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m64r,   bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_frstor,      bRm);
            case 5: return IEMOP_RAISE_INVALID_OPCODE();
            case 6: return FNIEMOP_CALL_1(iemOp_fnsave,      bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstsw,      bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
15629
15630
/** Opcode 0xde 11/0. */
FNIEMOP_DEF_1(iemOp_faddp_stN_st0, uint8_t, bRm)
{
    /* FADDP ST(i),ST(0): add ST(0) to ST(i), store in ST(i), then pop. */
    IEMOP_MNEMONIC("faddp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fadd_r80_by_r80);
}
15637
15638
/** Opcode 0xde 11/1. */
FNIEMOP_DEF_1(iemOp_fmulp_stN_st0, uint8_t, bRm)
{
    /* FMULP ST(i),ST(0): multiply ST(i) by ST(0), store in ST(i), then pop. */
    IEMOP_MNEMONIC("fmulp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fmul_r80_by_r80);
}
15645
15646
15647/** Opcode 0xde 0xd9. */
15648FNIEMOP_DEF(iemOp_fcompp)
15649{
15650 IEMOP_MNEMONIC("fucompp st0,stN");
15651 return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fcom_r80_by_r80);
15652}
15653
15654
/** Opcode 0xde 11/4. */
FNIEMOP_DEF_1(iemOp_fsubrp_stN_st0, uint8_t, bRm)
{
    /* FSUBRP ST(i),ST(0): reversed subtract into ST(i), then pop. */
    IEMOP_MNEMONIC("fsubrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsubr_r80_by_r80);
}
15661
15662
/** Opcode 0xde 11/5. */
FNIEMOP_DEF_1(iemOp_fsubp_stN_st0, uint8_t, bRm)
{
    /* FSUBP ST(i),ST(0): subtract ST(0) from ST(i) into ST(i), then pop. */
    IEMOP_MNEMONIC("fsubp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsub_r80_by_r80);
}
15669
15670
/** Opcode 0xde 11/6. */
FNIEMOP_DEF_1(iemOp_fdivrp_stN_st0, uint8_t, bRm)
{
    /* FDIVRP ST(i),ST(0): reversed divide into ST(i), then pop. */
    IEMOP_MNEMONIC("fdivrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdivr_r80_by_r80);
}
15677
15678
/** Opcode 0xde 11/7. */
FNIEMOP_DEF_1(iemOp_fdivp_stN_st0, uint8_t, bRm)
{
    /* FDIVP ST(i),ST(0): divide ST(i) by ST(0) into ST(i), then pop. */
    IEMOP_MNEMONIC("fdivp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdiv_r80_by_r80);
}
15685
15686
/**
 * Common worker for FPU instructions working on ST0 and an m16i, and storing
 * the result in ST0.
 *
 * @param   bRm         The ModR/M byte (memory form); used to compute the
 *                      effective address of the m16i operand.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m16i, uint8_t, bRm, PFNIEMAIMPLFPUI16, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,              FpuRes);
    IEM_MC_LOCAL(int16_t,                   i16Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT,     pFpuRes,    FpuRes,  0);
    IEM_MC_ARG(PCRTFLOAT80U,                pr80Value1,          1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *,   pi16Val2,   i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* Operate on ST(0) unless it is empty, in which case signal stack underflow. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi16Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15722
15723
/** Opcode 0xde !11/0. */
FNIEMOP_DEF_1(iemOp_fiadd_m16i, uint8_t, bRm)
{
    /* FIADD m16int: add the 16-bit integer memory operand to ST(0). */
    IEMOP_MNEMONIC("fiadd m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fiadd_r80_by_i16);
}
15730
15731
/** Opcode 0xde !11/1. */
FNIEMOP_DEF_1(iemOp_fimul_m16i, uint8_t, bRm)
{
    /* FIMUL m16int: multiply ST(0) by the 16-bit integer memory operand. */
    IEMOP_MNEMONIC("fimul m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fimul_r80_by_i16);
}
15738
15739
/** Opcode 0xde !11/2. */
FNIEMOP_DEF_1(iemOp_ficom_m16i, uint8_t, bRm)
{
    /* FICOM m16int: compare ST(0) with the 16-bit integer; only FSW is updated. */
    IEMOP_MNEMONIC("ficom st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,                  u16Fsw);
    IEM_MC_LOCAL(int16_t,                   i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,        pu16Fsw,    u16Fsw,  0);
    IEM_MC_ARG(PCRTFLOAT80U,                pr80Value1,          1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *,   pi16Val2,   i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* Compare unless ST(0) is empty (stack underflow, no pop). */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15772
15773
/** Opcode 0xde !11/3. */
FNIEMOP_DEF_1(iemOp_ficomp_m16i, uint8_t, bRm)
{
    /* FICOMP m16int: like FICOM m16int, but pops the register stack afterwards. */
    IEMOP_MNEMONIC("ficomp st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,                  u16Fsw);
    IEM_MC_LOCAL(int16_t,                   i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,        pu16Fsw,    u16Fsw,  0);
    IEM_MC_ARG(PCRTFLOAT80U,                pr80Value1,          1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *,   pi16Val2,   i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* Compare then pop; an empty ST(0) signals stack underflow (still pops). */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15806
15807
/** Opcode 0xde !11/4. */
FNIEMOP_DEF_1(iemOp_fisub_m16i, uint8_t, bRm)
{
    /* FISUB m16int: subtract the 16-bit integer memory operand from ST(0). */
    IEMOP_MNEMONIC("fisub m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisub_r80_by_i16);
}
15814
15815
/** Opcode 0xde !11/5. */
FNIEMOP_DEF_1(iemOp_fisubr_m16i, uint8_t, bRm)
{
    /* FISUBR m16int: reversed subtract, result in ST(0). */
    IEMOP_MNEMONIC("fisubr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisubr_r80_by_i16);
}
15822
15823
15824/** Opcode 0xde !11/6. */
15825FNIEMOP_DEF_1(iemOp_fidiv_m16i, uint8_t, bRm)
15826{
15827 IEMOP_MNEMONIC("fiadd m16i");
15828 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidiv_r80_by_i16);
15829}
15830
15831
15832/** Opcode 0xde !11/7. */
15833FNIEMOP_DEF_1(iemOp_fidivr_m16i, uint8_t, bRm)
15834{
15835 IEMOP_MNEMONIC("fiadd m16i");
15836 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidivr_r80_by_i16);
15837}
15838
15839
/** Opcode 0xde.
 * FPU escape 0xde: dispatches on the ModRM byte.  Register forms (mod=3)
 * are the pop-variant arithmetic/compare instructions on ST(i); memory
 * forms are the signed 16-bit integer arithmetic instructions. */
FNIEMOP_DEF(iemOp_EscF6)
{
    /* Record where the FPU opcode starts (for FOP updating) before fetching ModRM. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_faddp_stN_st0, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmulp_stN_st0, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 3: if (bRm == 0xd9) /* FCOMPP has the single encoding DE D9. */
                        return FNIEMOP_CALL(iemOp_fcompp);
                    return IEMOP_RAISE_INVALID_OPCODE();
            case 4: return FNIEMOP_CALL_1(iemOp_fsubrp_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubp_stN_st0, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivrp_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivp_stN_st0, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m16i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m16i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m16i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m16i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m16i, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m16i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m16i, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m16i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
15878
15879
/** Opcode 0xdf 11/0.
 * Undocumented instruction, assumed to work like ffree + fincstp. */
FNIEMOP_DEF_1(iemOp_ffreep_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ffreep stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Tag ST(i) as empty and then pop the stack (the FFREE + FINCSTP combo). */
    IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
15901
15902
/** Opcode 0xdf 0xe0.
 * FNSTSW AX: store the FPU status word in AX without first checking for
 * pending unmasked FPU exceptions (no-wait form). */
FNIEMOP_DEF(iemOp_fnstsw_ax)
{
    IEMOP_MNEMONIC("fnstsw ax");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
15918
15919
15920/** Opcode 0xdf 11/5. */
15921FNIEMOP_DEF_1(iemOp_fucomip_st0_stN, uint8_t, bRm)
15922{
15923 IEMOP_MNEMONIC("fcomip st0,stN");
15924 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
15925}
15926
15927
/** Opcode 0xdf 11/6.
 * FCOMIP: compare ST(0) with ST(i), setting ZF/PF/CF in EFLAGS, then pop
 * the FPU register stack. */
FNIEMOP_DEF_1(iemOp_fcomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcomip st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
}
15934
15935
/** Opcode 0xdf !11/0.
 * FILD m16int: convert a signed 16-bit integer from memory to extended
 * precision and push it onto the FPU register stack. */
FNIEMOP_DEF_1(iemOp_fild_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fild m16i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int16_t, i16Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val, i16Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* ST(7) must be free for the push, otherwise it's a stack overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i16_to_r80, pFpuRes, pi16Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15967
15968
/** Opcode 0xdf !11/1.
 * FISTTP m16int (SSE3): store ST(0) to memory as a signed 16-bit integer
 * using truncation (chop) regardless of the rounding control, then pop. */
FNIEMOP_DEF_1(iemOp_fisttp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisttp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: with the invalid-op exception masked (FCW.IM),
           store the integer indefinite value instead. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
16003
16004
16005/** Opcode 0xdf !11/2. */
16006FNIEMOP_DEF_1(iemOp_fist_m16i, uint8_t, bRm)
16007{
16008 IEMOP_MNEMONIC("fistp m16i");
16009 IEM_MC_BEGIN(3, 2);
16010 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
16011 IEM_MC_LOCAL(uint16_t, u16Fsw);
16012 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
16013 IEM_MC_ARG(int16_t *, pi16Dst, 1);
16014 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
16015
16016 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
16017 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16018 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16019 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16020
16021 IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
16022 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
16023 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
16024 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
16025 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
16026 IEM_MC_ELSE()
16027 IEM_MC_IF_FCW_IM()
16028 IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
16029 IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
16030 IEM_MC_ENDIF();
16031 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
16032 IEM_MC_ENDIF();
16033 IEM_MC_USED_FPU();
16034 IEM_MC_ADVANCE_RIP();
16035
16036 IEM_MC_END();
16037 return VINF_SUCCESS;
16038}
16039
16040
/** Opcode 0xdf !11/3.
 * FISTP m16int: store ST(0) to memory as a signed 16-bit integer using the
 * current rounding mode, then pop the register stack. */
FNIEMOP_DEF_1(iemOp_fistp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fistp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: with the invalid-op exception masked (FCW.IM),
           store the integer indefinite value instead. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
16075
16076
/** Opcode 0xdf !11/4.
 * FBLD - load an 80-bit packed BCD integer and push it; not implemented yet.
 * (NOTE(review): the operand is m80bcd despite the _m80d name suffix.) */
FNIEMOP_STUB_1(iemOp_fbld_m80d, uint8_t, bRm);
16079
16080
/** Opcode 0xdf !11/5.
 * FILD m64int: convert a signed 64-bit integer from memory to extended
 * precision and push it onto the FPU register stack. */
FNIEMOP_DEF_1(iemOp_fild_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fild m64i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int64_t, i64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int64_t const *, pi64Val, i64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I64(i64Val, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* ST(7) must be free for the push, otherwise it's a stack overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i64_to_r80, pFpuRes, pi64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
16112
16113
/** Opcode 0xdf !11/6.
 * FBSTP - store ST(0) as an 80-bit packed BCD integer and pop; not
 * implemented yet.  (NOTE(review): operand is m80bcd despite the name.) */
FNIEMOP_STUB_1(iemOp_fbstp_m80d, uint8_t, bRm);
16116
16117
/** Opcode 0xdf !11/7.
 * FISTP m64int: store ST(0) to memory as a signed 64-bit integer using the
 * current rounding mode, then pop the register stack. */
FNIEMOP_DEF_1(iemOp_fistp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fistp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: with the invalid-op exception masked (FCW.IM),
           store the integer indefinite value instead. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
16152
16153
16154/** Opcode 0xdf. */
16155FNIEMOP_DEF(iemOp_EscF7)
16156{
16157 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
16158 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
16159 {
16160 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
16161 {
16162 case 0: return FNIEMOP_CALL_1(iemOp_ffreep_stN, bRm); /* ffree + pop afterwards, since forever according to AMD. */
16163 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, behaves like FXCH ST(i) on intel. */
16164 case 2: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
16165 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
16166 case 4: if (bRm == 0xe0)
16167 return FNIEMOP_CALL(iemOp_fnstsw_ax);
16168 return IEMOP_RAISE_INVALID_OPCODE();
16169 case 5: return FNIEMOP_CALL_1(iemOp_fucomip_st0_stN, bRm);
16170 case 6: return FNIEMOP_CALL_1(iemOp_fcomip_st0_stN, bRm);
16171 case 7: return IEMOP_RAISE_INVALID_OPCODE();
16172 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16173 }
16174 }
16175 else
16176 {
16177 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
16178 {
16179 case 0: return FNIEMOP_CALL_1(iemOp_fild_m16i, bRm);
16180 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m16i, bRm);
16181 case 2: return FNIEMOP_CALL_1(iemOp_fist_m16i, bRm);
16182 case 3: return FNIEMOP_CALL_1(iemOp_fistp_m16i, bRm);
16183 case 4: return FNIEMOP_CALL_1(iemOp_fbld_m80d, bRm);
16184 case 5: return FNIEMOP_CALL_1(iemOp_fild_m64i, bRm);
16185 case 6: return FNIEMOP_CALL_1(iemOp_fbstp_m80d, bRm);
16186 case 7: return FNIEMOP_CALL_1(iemOp_fistp_m64i, bRm);
16187 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16188 }
16189 }
16190}
16191
16192
/** Opcode 0xe0.
 * LOOPNE/LOOPNZ Jb: decrement the count register and take the short branch
 * when it is non-zero AND ZF is clear.  The counter width (CX/ECX/RCX) is
 * chosen by the effective address size; the decrement does not touch EFLAGS. */
FNIEMOP_DEF(iemOp_loopne_Jb)
{
    IEMOP_MNEMONIC("loopne Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* near branch: defaults to 64-bit operand size in long mode */

    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16239
16240
/** Opcode 0xe1.
 * LOOPE/LOOPZ Jb: decrement the count register and take the short branch
 * when it is non-zero AND ZF is set.  The counter width (CX/ECX/RCX) is
 * chosen by the effective address size; the decrement does not touch EFLAGS. */
FNIEMOP_DEF(iemOp_loope_Jb)
{
    IEMOP_MNEMONIC("loope Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* near branch: defaults to 64-bit operand size in long mode */

    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16287
16288
/** Opcode 0xe2.
 * LOOP Jb: decrement the count register and take the short branch while it
 * is non-zero.  A LOOP that jumps to itself (displacement equal to minus the
 * instruction length) is just an expensive way of zeroing the counter, so
 * that case is short-circuited instead of iterating. */
FNIEMOP_DEF(iemOp_loop_Jb)
{
    IEMOP_MNEMONIC("loop Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* near branch: defaults to 64-bit operand size in long mode */

    /** @todo Check out the #GP case if EIP < CS.Base or EIP > CS.Limit when
     * using the 32-bit operand size override.  How can that be restarted?  See
     * weird pseudo code in intel manual. */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            /* Self-jump detection: offOpcode is the instruction length here. */
            if (-(int8_t)pIemCpu->offOpcode != i8Imm)
            {
                IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
                IEM_MC_IF_CX_IS_NZ() {
                    IEM_MC_REL_JMP_S8(i8Imm);
                } IEM_MC_ELSE() {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ENDIF();
            }
            else
            {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP();
            }
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            if (-(int8_t)pIemCpu->offOpcode != i8Imm)
            {
                IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
                IEM_MC_IF_ECX_IS_NZ() {
                    IEM_MC_REL_JMP_S8(i8Imm);
                } IEM_MC_ELSE() {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ENDIF();
            }
            else
            {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP();
            }
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            if (-(int8_t)pIemCpu->offOpcode != i8Imm)
            {
                IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
                IEM_MC_IF_RCX_IS_NZ() {
                    IEM_MC_REL_JMP_S8(i8Imm);
                } IEM_MC_ELSE() {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ENDIF();
            }
            else
            {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP();
            }
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16362
16363
/** Opcode 0xe3.
 * JCXZ/JECXZ/JRCXZ Jb: take the short branch when the count register is
 * zero; the register width is chosen by the effective address size.  Note
 * the branch sense is inverted relative to LOOP (jump when zero). */
FNIEMOP_DEF(iemOp_jecxz_Jb)
{
    IEMOP_MNEMONIC("jecxz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* near branch: defaults to 64-bit operand size in long mode */

    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_CX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_ECX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_RCX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16407
16408
16409/** Opcode 0xe4 */
16410FNIEMOP_DEF(iemOp_in_AL_Ib)
16411{
16412 IEMOP_MNEMONIC("in eAX,Ib");
16413 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
16414 IEMOP_HLP_NO_LOCK_PREFIX();
16415 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, 1);
16416}
16417
16418
/** Opcode 0xe5.
 * IN eAX,Ib: read a word/dword (per operand size) from the I/O port given
 * by the immediate into AX/EAX. */
FNIEMOP_DEF(iemOp_in_eAX_Ib)
{
    IEMOP_MNEMONIC("in eAX,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, pIemCpu->enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
16427
16428
/** Opcode 0xe6.
 * OUT Ib,AL: write AL to the I/O port given by the immediate. */
FNIEMOP_DEF(iemOp_out_Ib_AL)
{
    IEMOP_MNEMONIC("out Ib,AL");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, 1);
}
16437
16438
/** Opcode 0xe7.
 * OUT Ib,eAX: write AX/EAX (per operand size) to the I/O port given by the
 * immediate. */
FNIEMOP_DEF(iemOp_out_Ib_eAX)
{
    IEMOP_MNEMONIC("out Ib,eAX");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, pIemCpu->enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
16447
16448
/** Opcode 0xe8.
 * CALL Jv: near relative call.  The immediate is sign-extended; in 64-bit
 * mode the displacement is always 32-bit (sign-extended to 64).  The actual
 * push/branch work is deferred to the iemCImpl_call_rel_* workers. */
FNIEMOP_DEF(iemOp_call_Jv)
{
    IEMOP_MNEMONIC("call Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_16, (int16_t)u16Imm);
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_32, (int32_t)u32Imm);
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_64, u64Imm);
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16477
16478
/** Opcode 0xe9.
 * JMP Jv: near relative jump.  64-bit mode shares the 32-bit displacement
 * path since the immediate stays 32 bits there (sign-extended by the jump). */
FNIEMOP_DEF(iemOp_jmp_Jv)
{
    IEMOP_MNEMONIC("jmp Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
            IEM_MC_BEGIN(0, 0);
            IEM_MC_REL_JMP_S16(i16Imm);
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        case IEMMODE_32BIT:
        {
            int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
            IEM_MC_BEGIN(0, 0);
            IEM_MC_REL_JMP_S32(i32Imm);
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16508
16509
/** Opcode 0xea.
 * JMP Ap: direct far jump with an immediate selector:offset pair.  Not
 * valid in 64-bit mode. */
FNIEMOP_DEF(iemOp_jmp_Ap)
{
    IEMOP_MNEMONIC("jmp Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    uint32_t offSeg;
    if (pIemCpu->enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&offSeg);
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
    uint16_t uSel;  IEM_OPCODE_GET_NEXT_U16(&uSel);
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_FarJmp, uSel, offSeg, pIemCpu->enmEffOpSize);
}
16526
16527
/** Opcode 0xeb.
 * JMP Jb: short relative jump with an 8-bit signed displacement. */
FNIEMOP_DEF(iemOp_jmp_Jb)
{
    IEMOP_MNEMONIC("jmp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_REL_JMP_S8(i8Imm);
    IEM_MC_END();
    return VINF_SUCCESS;
}
16541
16542
/** Opcode 0xec.
 * IN AL,DX: read one byte from the I/O port in DX into AL. */
FNIEMOP_DEF(iemOp_in_AL_DX)
{
    IEMOP_MNEMONIC("in AL,DX");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, 1);
}
16550
16551
/** Opcode 0xed.
 * IN eAX,DX: read a word/dword (per operand size) from the I/O port in DX
 * into AX/EAX.  (NOTE(review): the function name lacks the 'in_' prefix its
 * siblings use; kept as-is since it is referenced from the opcode table.) */
FNIEMOP_DEF(iemOp_eAX_DX)
{
    IEMOP_MNEMONIC("in eAX,DX");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, pIemCpu->enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
16559
16560
/** Opcode 0xee.
 * OUT DX,AL: write AL to the I/O port in DX. */
FNIEMOP_DEF(iemOp_out_DX_AL)
{
    IEMOP_MNEMONIC("out DX,AL");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, 1);
}
16568
16569
/** Opcode 0xef.
 * OUT DX,eAX: write AX/EAX (per operand size) to the I/O port in DX. */
FNIEMOP_DEF(iemOp_out_DX_eAX)
{
    IEMOP_MNEMONIC("out DX,eAX");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, pIemCpu->enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
16577
16578
/** Opcode 0xf0.
 * LOCK prefix: records the prefix and recursively decodes the next opcode
 * byte; lock legality is checked by the instruction decoders themselves. */
FNIEMOP_DEF(iemOp_lock)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("lock");
    pIemCpu->fPrefixes |= IEM_OP_PRF_LOCK;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
16588
16589
/** Opcode 0xf1.
 * INT1/ICEBP: raises a \#DB; fIsBpInstr is false so it is not treated like
 * the INT3 breakpoint instruction. */
FNIEMOP_DEF(iemOp_int_1)
{
    IEMOP_MNEMONIC("int1"); /* icebp */
    IEMOP_HLP_MIN_386(); /** @todo does not generate #UD on 286, or so they say... */
    /** @todo testcase! */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_DB, false /*fIsBpInstr*/);
}
16598
16599
/** Opcode 0xf2.
 * REPNE/REPNZ prefix: records the prefix (replacing any earlier REPE) and
 * recursively decodes the next opcode byte. */
FNIEMOP_DEF(iemOp_repne)
{
    /* This overrides any previous REPE prefix. */
    pIemCpu->fPrefixes &= ~IEM_OP_PRF_REPZ;
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repne");
    pIemCpu->fPrefixes |= IEM_OP_PRF_REPNZ;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
16611
16612
/** Opcode 0xf3.
 * REP/REPE/REPZ prefix: records the prefix (replacing any earlier REPNE)
 * and recursively decodes the next opcode byte. */
FNIEMOP_DEF(iemOp_repe)
{
    /* This overrides any previous REPNE prefix. */
    pIemCpu->fPrefixes &= ~IEM_OP_PRF_REPNZ;
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repe");
    pIemCpu->fPrefixes |= IEM_OP_PRF_REPZ;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
16624
16625
/** Opcode 0xf4.
 * HLT: halt the CPU; privilege checking is done by the C implementation. */
FNIEMOP_DEF(iemOp_hlt)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_hlt);
}
16632
16633
/** Opcode 0xf5.
 * CMC: complement the carry flag; no other flags are affected. */
FNIEMOP_DEF(iemOp_cmc)
{
    IEMOP_MNEMONIC("cmc");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_FLIP_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
16645
16646
/**
 * Common implementation of 'inc/dec/not/neg Eb'.
 *
 * Register operands call the normal worker directly; memory operands are
 * mapped read-write so the locked worker variant can be used when a LOCK
 * prefix is present.
 *
 * @param   bRm     The RM byte.
 * @param   pImpl   The instruction implementation.
 */
FNIEMOP_DEF_2(iemOpCommonUnaryEb, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
{
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint8_t *,   pu8Dst, 0);
        IEM_MC_ARG(uint32_t *,  pEFlags, 1);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint8_t *,       pu8Dst,          0);
        IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1);
        IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        /* Pick the locked worker when a LOCK prefix is in effect. */
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU8, pu8Dst, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
16690
16691
/**
 * Common implementation of 'inc/dec/not/neg Ev'.
 *
 * Register operands are forwarded to the shared iemOpCommonUnaryGReg worker;
 * the memory forms for all three operand sizes are handled here, mapping the
 * operand read-write and selecting the locked worker variant when a LOCK
 * prefix is present.
 *
 * @param   bRm     The RM byte.
 * @param   pImpl   The instruction implementation.
 */
FNIEMOP_DEF_2(iemOpCommonUnaryEv, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
{
    /* Registers are handled by a common worker. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, pImpl, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);

    /* Memory we do here. */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 2);
            IEM_MC_ARG(uint16_t *,      pu16Dst,         0);
            IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1);
            IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
            IEM_MC_FETCH_EFLAGS(EFlags);
            if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
            else
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU16, pu16Dst, pEFlags);

            IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
            IEM_MC_COMMIT_EFLAGS(EFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 2);
            IEM_MC_ARG(uint32_t *,      pu32Dst,         0);
            IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1);
            IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
            IEM_MC_FETCH_EFLAGS(EFlags);
            if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
            else
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU32, pu32Dst, pEFlags);

            IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
            IEM_MC_COMMIT_EFLAGS(EFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 2);
            IEM_MC_ARG(uint64_t *,      pu64Dst,         0);
            IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1);
            IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
            IEM_MC_FETCH_EFLAGS(EFlags);
            if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
            else
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU64, pu64Dst, pEFlags);

            IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
            IEM_MC_COMMIT_EFLAGS(EFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16770
16771
/** Opcode 0xf6 /0.
 * TEST Eb,Ib: AND the byte operand with the immediate, updating EFLAGS only
 * (the destination is never written, hence the read-only memory mapping). */
FNIEMOP_DEF_1(iemOp_grp3_test_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC("test Eb,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
        IEM_MC_ARG_CONST(uint8_t,   u8Src,/*=*/u8Imm,   1);
        IEM_MC_ARG(uint32_t *,      pEFlags,            2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,       pu8Dst,          0);
        IEM_MC_ARG(uint8_t,         u8Src,           1);
        IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

        /* cbImmAndRspOffset=1: one immediate byte follows the ModRM operand. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_ASSIGN(u8Src, u8Imm);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_R, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_R);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
16819
16820
/**
 * Opcode 0xf7 /0 - TEST Ev,Iv.
 *
 * ANDs an immediate into the operand for EFLAGS purposes only; the
 * destination operand is never written back.  That is why the memory
 * operand is mapped read-only (IEM_ACCESS_DATA_R) and the 32-bit
 * register variant skips the usual high-dword clearing.
 *
 * @param   bRm     The ModR/M byte.
 */
FNIEMOP_DEF_1(iemOp_grp3_test_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC("test Ev,Iv");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after TEST. */

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/u16Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/u32Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
                /* No clearing the high dword here - test doesn't write back the result. */
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                /* Immediate is a sign-extended 32-bit value in 64-bit mode. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/u64Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* The trailing immediate size (2 bytes) is passed so the
                   effective address calc can account for it (RIP-relative). */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_R, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_R, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* Still 4 immediate bytes in 64-bit mode (imm32 sign-extended). */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_R, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
16957
16958
/**
 * Opcode 0xf6 /4, /5, /6 and /7 - common worker for the byte-sized
 * MUL, IMUL, DIV and IDIV forms.
 *
 * The implicit operand is AX (result for mul/imul, dividend/result for
 * div/idiv).  The assembly worker returns non-zero to signal a divide
 * error, in which case \#DE is raised instead of advancing RIP.
 *
 * @param   bRm     The ModR/M byte.
 * @param   pfnU8   The byte-sized assembly implementation to invoke.
 */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEb, uint8_t, bRm, PFNIEMAIMPLMULDIVU8, pfnU8)
{
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        IEMOP_HLP_NO_LOCK_PREFIX(); /* NOTE(review): redundant with the check above - harmless. */
        IEM_MC_BEGIN(3, 1);
        IEM_MC_ARG(uint16_t *, pu16AX, 0);
        IEM_MC_ARG(uint8_t, u8Value, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(int32_t, rc);

        IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *, pu16AX, 0);
        IEM_MC_ARG(uint8_t, u8Value, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(int32_t, rc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_MEM_U8(u8Value, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
17013
17014
/**
 * Opcode 0xf7 /4, /5, /6 and /7 - common worker for the word/dword/qword
 * MUL, IMUL, DIV and IDIV forms.
 *
 * The implicit operands are the AX:DX / EAX:EDX / RAX:RDX register pairs.
 * The assembly worker returns non-zero to signal a divide error, in which
 * case \#DE is raised instead of advancing RIP.  In the 32-bit case the
 * high dwords of RAX/RDX are cleared on success, matching hardware
 * behaviour for 32-bit register writes; the 64-bit case writes the full
 * registers and needs no such step.
 *
 * @param   bRm     The ModR/M byte.
 * @param   pImpl   Table with the u16/u32/u64 assembly implementations.
 */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEv, uint8_t, bRm, PCIEMOPMULDIVSIZES, pImpl)
{
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEMOP_HLP_NO_LOCK_PREFIX(); /* NOTE(review): redundant with the check above - harmless. */
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint16_t *, pu16AX, 0);
                IEM_MC_ARG(uint16_t *, pu16DX, 1);
                IEM_MC_ARG(uint16_t, u16Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint32_t *, pu32AX, 0);
                IEM_MC_ARG(uint32_t *, pu32DX, 1);
                IEM_MC_ARG(uint32_t, u32Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    /* 32-bit register writes zero the high dword. */
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint64_t *, pu64AX, 0);
                IEM_MC_ARG(uint64_t *, pu64DX, 1);
                IEM_MC_ARG(uint64_t, u64Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint16_t *, pu16AX, 0);
                IEM_MC_ARG(uint16_t *, pu16DX, 1);
                IEM_MC_ARG(uint16_t, u16Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint32_t *, pu32AX, 0);
                IEM_MC_ARG(uint32_t *, pu32DX, 1);
                IEM_MC_ARG(uint32_t, u32Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    /* 32-bit register writes zero the high dword. */
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint64_t *, pu64AX, 0);
                IEM_MC_ARG(uint64_t *, pu64DX, 1);
                IEM_MC_ARG(uint64_t, u64Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
17198
/**
 * Opcode 0xf6 - Group 3, byte-sized forms.
 *
 * Dispatches on the reg field of the ModR/M byte: /0 test, /1 invalid,
 * /2 not, /3 neg, /4 mul, /5 imul, /6 div, /7 idiv.
 */
FNIEMOP_DEF(iemOp_Grp3_Eb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
        case 1:
/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2:
            IEMOP_MNEMONIC("not Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_not);
        case 3:
            IEMOP_MNEMONIC("neg Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_neg);
        case 4:
            IEMOP_MNEMONIC("mul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_mul_u8);
        case 5:
            IEMOP_MNEMONIC("imul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_imul_u8);
        case 6:
            IEMOP_MNEMONIC("div Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_div_u8);
        case 7:
            IEMOP_MNEMONIC("idiv Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_idiv_u8);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
17235
17236
/**
 * Opcode 0xf7 - Group 3, word/dword/qword forms.
 *
 * Dispatches on the reg field of the ModR/M byte: /0 test, /1 invalid,
 * /2 not, /3 neg, /4 mul, /5 imul, /6 div, /7 idiv.
 */
FNIEMOP_DEF(iemOp_Grp3_Ev)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
        case 1:
/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2:
            IEMOP_MNEMONIC("not Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_not);
        case 3:
            IEMOP_MNEMONIC("neg Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_neg);
        case 4:
            IEMOP_MNEMONIC("mul Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_mul);
        case 5:
            IEMOP_MNEMONIC("imul Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_imul);
        case 6:
            IEMOP_MNEMONIC("div Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_div);
        case 7:
            IEMOP_MNEMONIC("idiv Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_idiv);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
17273
17274
/** Opcode 0xf8 - CLC (clear carry flag). */
FNIEMOP_DEF(iemOp_clc)
{
    IEMOP_MNEMONIC("clc");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
17286
17287
/** Opcode 0xf9 - STC (set carry flag). */
FNIEMOP_DEF(iemOp_stc)
{
    IEMOP_MNEMONIC("stc");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_SET_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
17299
17300
/** Opcode 0xfa - CLI; deferred to the C implementation (privilege checks). */
FNIEMOP_DEF(iemOp_cli)
{
    IEMOP_MNEMONIC("cli");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cli);
}
17308
17309
/** Opcode 0xfb - STI; deferred to the C implementation (privilege checks). */
FNIEMOP_DEF(iemOp_sti)
{
    IEMOP_MNEMONIC("sti");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sti);
}
17316
17317
/** Opcode 0xfc - CLD (clear direction flag). */
FNIEMOP_DEF(iemOp_cld)
{
    IEMOP_MNEMONIC("cld");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
17329
17330
/** Opcode 0xfd - STD (set direction flag). */
FNIEMOP_DEF(iemOp_std)
{
    IEMOP_MNEMONIC("std");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_SET_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
17342
17343
17344/** Opcode 0xfe. */
17345FNIEMOP_DEF(iemOp_Grp4)
17346{
17347 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
17348 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
17349 {
17350 case 0:
17351 IEMOP_MNEMONIC("inc Ev");
17352 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_inc);
17353 case 1:
17354 IEMOP_MNEMONIC("dec Ev");
17355 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_dec);
17356 default:
17357 IEMOP_MNEMONIC("grp4-ud");
17358 return IEMOP_RAISE_INVALID_OPCODE();
17359 }
17360}
17361
17362
/**
 * Opcode 0xff /2 - CALL Ev (near indirect call).
 *
 * The target RIP is read from a register or a memory operand; pushing the
 * return address and loading RIP are done by the iemCImpl_call_* workers.
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_calln_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC("calln Ev");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo Too early? */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* The new RIP is taken from a register. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from a memory location. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEM_MC_FETCH_MEM_U16(u16Target, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEM_MC_FETCH_MEM_U32(u32Target, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEM_MC_FETCH_MEM_U64(u64Target, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
17444
17445typedef IEM_CIMPL_DECL_TYPE_3(FNIEMCIMPLFARBRANCH, uint16_t, uSel, uint64_t, offSeg, IEMMODE, enmOpSize);
17446
17447FNIEMOP_DEF_2(iemOpHlp_Grp5_far_Ep, uint8_t, bRm, FNIEMCIMPLFARBRANCH *, pfnCImpl)
17448{
17449 /* Registers? How?? */
17450 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
17451 return IEMOP_RAISE_INVALID_OPCODE(); /* callf eax is not legal */
17452
17453 /* Far pointer loaded from memory. */
17454 switch (pIemCpu->enmEffOpSize)
17455 {
17456 case IEMMODE_16BIT:
17457 IEM_MC_BEGIN(3, 1);
17458 IEM_MC_ARG(uint16_t, u16Sel, 0);
17459 IEM_MC_ARG(uint16_t, offSeg, 1);
17460 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
17461 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17462 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17463 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17464 IEM_MC_FETCH_MEM_U16(offSeg, pIemCpu->iEffSeg, GCPtrEffSrc);
17465 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc, 2);
17466 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
17467 IEM_MC_END();
17468 return VINF_SUCCESS;
17469
17470 case IEMMODE_64BIT:
17471 /** @todo testcase: AMD does not seem to believe in the case (see bs-cpu-xcpt-1)
17472 * and will apparently ignore REX.W, at least for the jmp far qword [rsp]
17473 * and call far qword [rsp] encodings. */
17474 if (!IEM_IS_GUEST_CPU_AMD(pIemCpu))
17475 {
17476 IEM_MC_BEGIN(3, 1);
17477 IEM_MC_ARG(uint16_t, u16Sel, 0);
17478 IEM_MC_ARG(uint64_t, offSeg, 1);
17479 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
17480 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17481 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17482 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17483 IEM_MC_FETCH_MEM_U64(offSeg, pIemCpu->iEffSeg, GCPtrEffSrc);
17484 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc, 8);
17485 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
17486 IEM_MC_END();
17487 return VINF_SUCCESS;
17488 }
17489 /* AMD falls thru. */
17490
17491 case IEMMODE_32BIT:
17492 IEM_MC_BEGIN(3, 1);
17493 IEM_MC_ARG(uint16_t, u16Sel, 0);
17494 IEM_MC_ARG(uint32_t, offSeg, 1);
17495 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_32BIT, 2);
17496 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17497 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17498 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17499 IEM_MC_FETCH_MEM_U32(offSeg, pIemCpu->iEffSeg, GCPtrEffSrc);
17500 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc, 4);
17501 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
17502 IEM_MC_END();
17503 return VINF_SUCCESS;
17504
17505 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17506 }
17507}
17508
17509
/**
 * Opcode 0xff /3 - CALLF Ep (far indirect call via memory far pointer).
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_callf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC("callf Ep");
    return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_callf);
}
17519
17520
/**
 * Opcode 0xff /4 - JMP Ev (near indirect jump).
 *
 * The target RIP is read from a register or a memory operand and loaded
 * directly via IEM_MC_SET_RIP_*; nothing is pushed.
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpn_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC("jmpn Ev");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo Too early? */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* The new RIP is taken from a register. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_SET_RIP_U16(u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_SET_RIP_U32(u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_SET_RIP_U64(u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from a memory location. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEM_MC_FETCH_MEM_U16(u16Target, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U16(u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEM_MC_FETCH_MEM_U32(u32Target, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U32(u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEM_MC_FETCH_MEM_U64(u64Target, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U64(u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
17602
17603
/**
 * Opcode 0xff /5 - JMPF Ep (far indirect jump via memory far pointer).
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC("jmpf Ep");
    return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_FarJmp);
}
17613
17614
/**
 * Opcode 0xff /6 - PUSH Ev.
 *
 * Register operands are forwarded to the common push-register worker;
 * memory operands are fetched and pushed here.
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_push_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC("push Ev");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo Too early? */

    /* Registers are handled by a common worker. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(iemOpCommonPushGReg, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);

    /* Memory we do here. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint16_t, u16Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEM_MC_FETCH_MEM_U16(u16Src, pIemCpu->iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U16(u16Src);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEM_MC_FETCH_MEM_U32(u32Src, pIemCpu->iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U32(u32Src);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEM_MC_FETCH_MEM_U64(u64Src, pIemCpu->iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U64(u64Src);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
17668
17669
/**
 * Opcode 0xff - Group 5.
 *
 * Dispatches on the reg field of the ModR/M byte: /0 inc, /1 dec,
 * /2 calln, /3 callf, /4 jmpn, /5 jmpf, /6 push, /7 invalid (\#UD).
 */
FNIEMOP_DEF(iemOp_Grp5)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            IEMOP_MNEMONIC("inc Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_inc);
        case 1:
            IEMOP_MNEMONIC("dec Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_dec);
        case 2:
            return FNIEMOP_CALL_1(iemOp_Grp5_calln_Ev, bRm);
        case 3:
            return FNIEMOP_CALL_1(iemOp_Grp5_callf_Ep, bRm);
        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp5_jmpn_Ev, bRm);
        case 5:
            return FNIEMOP_CALL_1(iemOp_Grp5_jmpf_Ep, bRm);
        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp5_push_Ev, bRm);
        case 7:
            IEMOP_MNEMONIC("grp5-ud");
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    AssertFailedReturn(VERR_IEM_IPE_3);
}
17698
17699
17700
/**
 * The one-byte opcode decoder map, indexed by opcode byte (0x00..0xff).
 * Forward declared (extern) at the top of the file.
 */
const PFNIEMOP g_apfnOneByteMap[256] =
{
    /* 0x00 */ iemOp_add_Eb_Gb, iemOp_add_Ev_Gv, iemOp_add_Gb_Eb, iemOp_add_Gv_Ev,
    /* 0x04 */ iemOp_add_Al_Ib, iemOp_add_eAX_Iz, iemOp_push_ES, iemOp_pop_ES,
    /* 0x08 */ iemOp_or_Eb_Gb, iemOp_or_Ev_Gv, iemOp_or_Gb_Eb, iemOp_or_Gv_Ev,
    /* 0x0c */ iemOp_or_Al_Ib, iemOp_or_eAX_Iz, iemOp_push_CS, iemOp_2byteEscape,
    /* 0x10 */ iemOp_adc_Eb_Gb, iemOp_adc_Ev_Gv, iemOp_adc_Gb_Eb, iemOp_adc_Gv_Ev,
    /* 0x14 */ iemOp_adc_Al_Ib, iemOp_adc_eAX_Iz, iemOp_push_SS, iemOp_pop_SS,
    /* 0x18 */ iemOp_sbb_Eb_Gb, iemOp_sbb_Ev_Gv, iemOp_sbb_Gb_Eb, iemOp_sbb_Gv_Ev,
    /* 0x1c */ iemOp_sbb_Al_Ib, iemOp_sbb_eAX_Iz, iemOp_push_DS, iemOp_pop_DS,
    /* 0x20 */ iemOp_and_Eb_Gb, iemOp_and_Ev_Gv, iemOp_and_Gb_Eb, iemOp_and_Gv_Ev,
    /* 0x24 */ iemOp_and_Al_Ib, iemOp_and_eAX_Iz, iemOp_seg_ES, iemOp_daa,
    /* 0x28 */ iemOp_sub_Eb_Gb, iemOp_sub_Ev_Gv, iemOp_sub_Gb_Eb, iemOp_sub_Gv_Ev,
    /* 0x2c */ iemOp_sub_Al_Ib, iemOp_sub_eAX_Iz, iemOp_seg_CS, iemOp_das,
    /* 0x30 */ iemOp_xor_Eb_Gb, iemOp_xor_Ev_Gv, iemOp_xor_Gb_Eb, iemOp_xor_Gv_Ev,
    /* 0x34 */ iemOp_xor_Al_Ib, iemOp_xor_eAX_Iz, iemOp_seg_SS, iemOp_aaa,
    /* 0x38 */ iemOp_cmp_Eb_Gb, iemOp_cmp_Ev_Gv, iemOp_cmp_Gb_Eb, iemOp_cmp_Gv_Ev,
    /* 0x3c */ iemOp_cmp_Al_Ib, iemOp_cmp_eAX_Iz, iemOp_seg_DS, iemOp_aas,
    /* 0x40 */ iemOp_inc_eAX, iemOp_inc_eCX, iemOp_inc_eDX, iemOp_inc_eBX,
    /* 0x44 */ iemOp_inc_eSP, iemOp_inc_eBP, iemOp_inc_eSI, iemOp_inc_eDI,
    /* 0x48 */ iemOp_dec_eAX, iemOp_dec_eCX, iemOp_dec_eDX, iemOp_dec_eBX,
    /* 0x4c */ iemOp_dec_eSP, iemOp_dec_eBP, iemOp_dec_eSI, iemOp_dec_eDI,
    /* 0x50 */ iemOp_push_eAX, iemOp_push_eCX, iemOp_push_eDX, iemOp_push_eBX,
    /* 0x54 */ iemOp_push_eSP, iemOp_push_eBP, iemOp_push_eSI, iemOp_push_eDI,
    /* 0x58 */ iemOp_pop_eAX, iemOp_pop_eCX, iemOp_pop_eDX, iemOp_pop_eBX,
    /* 0x5c */ iemOp_pop_eSP, iemOp_pop_eBP, iemOp_pop_eSI, iemOp_pop_eDI,
    /* 0x60 */ iemOp_pusha, iemOp_popa, iemOp_bound_Gv_Ma_evex, iemOp_arpl_Ew_Gw_movsx_Gv_Ev,
    /* 0x64 */ iemOp_seg_FS, iemOp_seg_GS, iemOp_op_size, iemOp_addr_size,
    /* 0x68 */ iemOp_push_Iz, iemOp_imul_Gv_Ev_Iz, iemOp_push_Ib, iemOp_imul_Gv_Ev_Ib,
    /* 0x6c */ iemOp_insb_Yb_DX, iemOp_inswd_Yv_DX, iemOp_outsb_Yb_DX, iemOp_outswd_Yv_DX,
    /* 0x70 */ iemOp_jo_Jb, iemOp_jno_Jb, iemOp_jc_Jb, iemOp_jnc_Jb,
    /* 0x74 */ iemOp_je_Jb, iemOp_jne_Jb, iemOp_jbe_Jb, iemOp_jnbe_Jb,
    /* 0x78 */ iemOp_js_Jb, iemOp_jns_Jb, iemOp_jp_Jb, iemOp_jnp_Jb,
    /* 0x7c */ iemOp_jl_Jb, iemOp_jnl_Jb, iemOp_jle_Jb, iemOp_jnle_Jb,
    /* 0x80 */ iemOp_Grp1_Eb_Ib_80, iemOp_Grp1_Ev_Iz, iemOp_Grp1_Eb_Ib_82, iemOp_Grp1_Ev_Ib,
    /* 0x84 */ iemOp_test_Eb_Gb, iemOp_test_Ev_Gv, iemOp_xchg_Eb_Gb, iemOp_xchg_Ev_Gv,
    /* 0x88 */ iemOp_mov_Eb_Gb, iemOp_mov_Ev_Gv, iemOp_mov_Gb_Eb, iemOp_mov_Gv_Ev,
    /* 0x8c */ iemOp_mov_Ev_Sw, iemOp_lea_Gv_M, iemOp_mov_Sw_Ev, iemOp_Grp1A,
    /* 0x90 */ iemOp_nop, iemOp_xchg_eCX_eAX, iemOp_xchg_eDX_eAX, iemOp_xchg_eBX_eAX,
    /* 0x94 */ iemOp_xchg_eSP_eAX, iemOp_xchg_eBP_eAX, iemOp_xchg_eSI_eAX, iemOp_xchg_eDI_eAX,
    /* 0x98 */ iemOp_cbw, iemOp_cwd, iemOp_call_Ap, iemOp_wait,
    /* 0x9c */ iemOp_pushf_Fv, iemOp_popf_Fv, iemOp_sahf, iemOp_lahf,
    /* 0xa0 */ iemOp_mov_Al_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL, iemOp_mov_Ov_rAX,
    /* 0xa4 */ iemOp_movsb_Xb_Yb, iemOp_movswd_Xv_Yv, iemOp_cmpsb_Xb_Yb, iemOp_cmpswd_Xv_Yv,
    /* 0xa8 */ iemOp_test_AL_Ib, iemOp_test_eAX_Iz, iemOp_stosb_Yb_AL, iemOp_stoswd_Yv_eAX,
    /* 0xac */ iemOp_lodsb_AL_Xb, iemOp_lodswd_eAX_Xv, iemOp_scasb_AL_Xb, iemOp_scaswd_eAX_Xv,
    /* 0xb0 */ iemOp_mov_AL_Ib, iemOp_CL_Ib, iemOp_DL_Ib, iemOp_BL_Ib,
    /* 0xb4 */ iemOp_mov_AH_Ib, iemOp_CH_Ib, iemOp_DH_Ib, iemOp_BH_Ib,
    /* 0xb8 */ iemOp_eAX_Iv, iemOp_eCX_Iv, iemOp_eDX_Iv, iemOp_eBX_Iv,
    /* 0xbc */ iemOp_eSP_Iv, iemOp_eBP_Iv, iemOp_eSI_Iv, iemOp_eDI_Iv,
    /* 0xc0 */ iemOp_Grp2_Eb_Ib, iemOp_Grp2_Ev_Ib, iemOp_retn_Iw, iemOp_retn,
    /* 0xc4 */ iemOp_les_Gv_Mp_vex2, iemOp_lds_Gv_Mp_vex3, iemOp_Grp11_Eb_Ib, iemOp_Grp11_Ev_Iz,
    /* 0xc8 */ iemOp_enter_Iw_Ib, iemOp_leave, iemOp_retf_Iw, iemOp_retf,
    /* 0xcc */ iemOp_int_3, iemOp_int_Ib, iemOp_into, iemOp_iret,
    /* 0xd0 */ iemOp_Grp2_Eb_1, iemOp_Grp2_Ev_1, iemOp_Grp2_Eb_CL, iemOp_Grp2_Ev_CL,
    /* 0xd4 */ iemOp_aam_Ib, iemOp_aad_Ib, iemOp_salc, iemOp_xlat,
    /* 0xd8 */ iemOp_EscF0, iemOp_EscF1, iemOp_EscF2, iemOp_EscF3,
    /* 0xdc */ iemOp_EscF4, iemOp_EscF5, iemOp_EscF6, iemOp_EscF7,
    /* 0xe0 */ iemOp_loopne_Jb, iemOp_loope_Jb, iemOp_loop_Jb, iemOp_jecxz_Jb,
    /* 0xe4 */ iemOp_in_AL_Ib, iemOp_in_eAX_Ib, iemOp_out_Ib_AL, iemOp_out_Ib_eAX,
    /* 0xe8 */ iemOp_call_Jv, iemOp_jmp_Jv, iemOp_jmp_Ap, iemOp_jmp_Jb,
    /* 0xec */ iemOp_in_AL_DX, iemOp_eAX_DX, iemOp_out_DX_AL, iemOp_out_DX_eAX,
    /* 0xf0 */ iemOp_lock, iemOp_int_1, iemOp_repne, iemOp_repe,
    /* 0xf4 */ iemOp_hlt, iemOp_cmc, iemOp_Grp3_Eb, iemOp_Grp3_Ev,
    /* 0xf8 */ iemOp_clc, iemOp_stc, iemOp_cli, iemOp_sti,
    /* 0xfc */ iemOp_cld, iemOp_std, iemOp_Grp4, iemOp_Grp5,
};
17768
17769
17770/** @} */
17771
Note: See TracBrowser for help on using the repository browser.

© 2025 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette