VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsOneByte.cpp.h@ 66131

Last change on this file since 66131 was 66131, checked in by vboxsync, 8 years ago

IEM: Some more ADC testcases.

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 378.7 KB
Line 
1/* $Id: IEMAllInstructionsOneByte.cpp.h 66131 2017-03-16 14:24:25Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 */
5
6/*
7 * Copyright (C) 2011-2016 Oracle Corporation
8 *
9 * This file is part of VirtualBox Open Source Edition (OSE), as
 * available from http://www.virtualbox.org. This file is free software;
11 * you can redistribute it and/or modify it under the terms of the GNU
12 * General Public License (GPL) as published by the Free Software
13 * Foundation, in version 2 as it comes in the "COPYING" file of the
14 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
15 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
16 */
17
18
19/*******************************************************************************
20* Global Variables *
21*******************************************************************************/
22extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */
23
24/** @def og_gen General
25 * @{
26 */
27
28/** @def og_gen_arith Arithmetic
29 * @{
30 */
31/** @defgroup og_gen_arith_bin Binary numbers */
32/** @defgroup og_gen_arith_dec Decimal numbers */
33/** @} */
34
35
36
37/** @name One byte opcodes.
38 * @{
39 */
40
41/* Instruction specification format - work in progress: */
42
/**
 * @opcode 0x00
 * @opmnemonic add
 * @op1 rm:Eb
 * @op2 reg:Gb
 * @opmaps one
 * @openc ModR/M
 * @opflmodify cf,pf,af,zf,sf,of
 * @ophints harmless ignores_op_size
 * @opstats add_Eb_Gb
 * @opgroup op_gen_arith_bin
 * @optest op1=1 op2=1 -> op1=2 efl&|=nc,pe,na,nz,pl,nv
 * @optest efl|=cf op1=1 op2=2 -> op1=3 efl&|=nc,po,na,nz,pl,nv
 * @optest op1=254 op2=1 -> op1=255 efl&|=nc,po,na,nz,ng,nv
 * @optest op1=128 op2=128 -> op1=0 efl&|=ov,pl,zf,na,po,cf
 */
FNIEMOP_DEF(iemOp_add_Eb_Gb)
{
    /* ADD r/m8, r8: decoding and write-back are done by the common rm_r8 worker. */
    IEMOP_MNEMONIC2(MR, ADD, add, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_add);
}


/**
 * @opcode 0x01
 * @opgroup op_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @optest op1=1 op2=1 -> op1=2 efl&|=nc,pe,na,nz,pl,nv
 * @optest efl|=cf op1=2 op2=2 -> op1=4 efl&|=nc,pe,na,nz,pl,nv
 * @optest efl&~=cf op1=-1 op2=1 -> op1=0 efl&|=cf,po,af,zf,pl,nv
 * @optest op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
 */
FNIEMOP_DEF(iemOp_add_Ev_Gv)
{
    /* ADD r/m, r (16/32/64-bit per effective operand size) via the common rm_rv worker. */
    IEMOP_MNEMONIC2(MR, ADD, add, Ev, Gv, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_add);
}


/**
 * @opcode 0x02
 * @opgroup op_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opcopytests iemOp_add_Eb_Gb
 */
FNIEMOP_DEF(iemOp_add_Gb_Eb)
{
    /* ADD r8, r/m8 - register-destination direction of opcode 0x00. */
    IEMOP_MNEMONIC2(RM, ADD, add, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_add);
}


/**
 * @opcode 0x03
 * @opgroup op_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opcopytests iemOp_add_Ev_Gv
 */
FNIEMOP_DEF(iemOp_add_Gv_Ev)
{
    /* ADD r, r/m - register-destination direction of opcode 0x01. */
    IEMOP_MNEMONIC2(RM, ADD, add, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_add);
}


/**
 * @opcode 0x04
 * @opgroup op_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opcopytests iemOp_add_Eb_Gb
 */
FNIEMOP_DEF(iemOp_add_Al_Ib)
{
    /* ADD AL, imm8. */
    IEMOP_MNEMONIC2(FIXED, ADD, add, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_add);
}


/**
 * @opcode 0x05
 * @opgroup op_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @optest op1=1 op2=1 -> op1=2 efl&|=nv,pl,nz,na,pe
 * @optest efl|=cf op1=2 op2=2 -> op1=4 efl&|=nc,pe,na,nz,pl,nv
 * @optest efl&~=cf op1=-1 op2=1 -> op1=0 efl&|=cf,po,af,zf,pl,nv
 * @optest op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
 */
FNIEMOP_DEF(iemOp_add_eAX_Iz)
{
    /* ADD rAX, Iz - immediate sized by the effective operand size. */
    IEMOP_MNEMONIC2(FIXED, ADD, add, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_add);
}
135
136
/**
 * @opcode 0x06
 * @opgroup op_stack_sreg
 */
FNIEMOP_DEF(iemOp_push_ES)
{
    /* PUSH ES - invalid in 64-bit mode (DISOPTYPE_INVALID_64 / IEMOP_HLP_NO_64BIT). */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, ES, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_ES);
}


/**
 * @opcode 0x07
 * @opgroup op_stack_sreg
 */
FNIEMOP_DEF(iemOp_pop_ES)
{
    /* POP ES - invalid in 64-bit mode; deferred to the C implementation. */
    IEMOP_MNEMONIC1(FIXED, POP, pop, ES, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_ES, pVCpu->iem.s.enmEffOpSize);
}
160
161
/**
 * @opcode 0x08
 * @opgroup op_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 * @optest op1=7 op2=12 -> op1=15 efl&|=nc,po,na,nz,pl,nv
 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
 * @optest op1=0xee op2=0x11 -> op1=0xff efl&|=nc,po,na,nz,ng,nv
 * @optest op1=0xff op2=0xff -> op1=0xff efl&|=nc,po,na,nz,ng,nv
 */
FNIEMOP_DEF(iemOp_or_Eb_Gb)
{
    /* OR r/m8, r8.  AF is architecturally undefined after OR (see @opflundef). */
    IEMOP_MNEMONIC2(MR, OR, or, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_or);
}


/**
 * @opcode 0x09
 * @opgroup op_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 * @optest efl|=of,cf op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
 * @optest op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest o16 / op1=0x5a5a op2=0xa5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest o32 / op1=0x5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5a5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 */
FNIEMOP_DEF(iemOp_or_Ev_Gv)
{
    /* OR r/m, r.  Note: the doc block above was opened with '/''*' (plain comment)
       instead of '/''**', which made doxygen drop the opcode tags - fixed. */
    IEMOP_MNEMONIC2(MR, OR, or, Ev, Gv, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_or);
}


/**
 * @opcode 0x0a
 * @opgroup op_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 * @opcopytests iemOp_or_Eb_Gb
 */
FNIEMOP_DEF(iemOp_or_Gb_Eb)
{
    /* OR r8, r/m8 - register-destination direction of opcode 0x08. */
    IEMOP_MNEMONIC2(RM, OR, or, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_or);
}


/**
 * @opcode 0x0b
 * @opgroup op_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 * @opcopytests iemOp_or_Ev_Gv
 */
FNIEMOP_DEF(iemOp_or_Gv_Ev)
{
    /* OR r, r/m - register-destination direction of opcode 0x09. */
    IEMOP_MNEMONIC2(RM, OR, or, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_or);
}


/**
 * @opcode 0x0c
 * @opgroup op_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 * @opcopytests iemOp_or_Eb_Gb
 */
FNIEMOP_DEF(iemOp_or_Al_Ib)
{
    /* OR AL, imm8. */
    IEMOP_MNEMONIC2(FIXED, OR, or, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_or);
}


/**
 * @opcode 0x0d
 * @opgroup op_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 * @optest efl|=of,cf op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
 * @optest op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest o16 / op1=0x5a5a op2=0xa5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest o32 / op1=0x5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest o64 / op1=0x5a5a5a5aa5a5a5a5 op2=0x5a5a5a5a -> op1=0x5a5a5a5affffffff efl&|=nc,po,na,nz,pl,nv
 */
FNIEMOP_DEF(iemOp_or_eAX_Iz)
{
    /* OR rAX, Iz - immediate sized by the effective operand size. */
    IEMOP_MNEMONIC2(FIXED, OR, or, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_or);
}
270
271
/**
 * @opcode 0x0e
 * @opgroup op_stack_sreg
 */
FNIEMOP_DEF(iemOp_push_CS)
{
    /* PUSH CS - invalid in 64-bit mode (0x0e is the two-byte escape there anyway). */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_CS);
}
282
283
284/**
285 * @opcode 0x0f
286 * @opmnemonic EscTwo0f
287 * @openc two0f
288 * @opdisenum OP_2B_ESC
289 * @ophints harmless
290 * @opgroup op_escapes
291 */
292FNIEMOP_DEF(iemOp_2byteEscape)
293{
294#ifdef VBOX_STRICT
295 /* Sanity check the table the first time around. */
296 static bool s_fTested = false;
297 if (RT_LIKELY(s_fTested)) { /* likely */ }
298 else
299 {
300 s_fTested = true;
301 Assert(g_apfnTwoByteMap[0xbc * 4 + 0] == iemOp_bsf_Gv_Ev);
302 Assert(g_apfnTwoByteMap[0xbc * 4 + 1] == iemOp_bsf_Gv_Ev);
303 Assert(g_apfnTwoByteMap[0xbc * 4 + 2] == iemOp_tzcnt_Gv_Ev);
304 Assert(g_apfnTwoByteMap[0xbc * 4 + 3] == iemOp_bsf_Gv_Ev);
305 }
306#endif
307
308 if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_286))
309 {
310 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
311 IEMOP_HLP_MIN_286();
312 return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
313 }
314 /* @opdone */
315
316 /*
317 * On the 8086 this is a POP CS instruction.
318 * For the time being we don't specify this this.
319 */
320 IEMOP_MNEMONIC1(FIXED, POP, pop, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_INVALID_64, IEMOPHINT_SKIP_PYTHON);
321 IEMOP_HLP_NO_64BIT();
322 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
323 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_ES, pVCpu->iem.s.enmEffOpSize);
324}
325
/**
 * @opcode 0x10
 * @opgroup op_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
 * @optest op1=0xff op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
 */
FNIEMOP_DEF(iemOp_adc_Eb_Gb)
{
    /* ADC r/m8, r8 - add with carry-in (see @opfltest cf). */
    IEMOP_MNEMONIC2(MR, ADC, adc, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_adc);
}


/**
 * @opcode 0x11
 * @opgroup op_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
 * @optest op1=-1 op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
 */
FNIEMOP_DEF(iemOp_adc_Ev_Gv)
{
    /* ADC r/m, r. */
    IEMOP_MNEMONIC2(MR, ADC, adc, Ev, Gv, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_adc);
}


/**
 * @opcode 0x12
 * @opgroup op_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 * @opcopytests iemOp_adc_Eb_Gb
 */
FNIEMOP_DEF(iemOp_adc_Gb_Eb)
{
    /* ADC r8, r/m8 - register-destination direction of opcode 0x10. */
    IEMOP_MNEMONIC2(RM, ADC, adc, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_adc);
}


/**
 * @opcode 0x13
 * @opgroup op_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 * @opcopytests iemOp_adc_Ev_Gv
 */
FNIEMOP_DEF(iemOp_adc_Gv_Ev)
{
    /* ADC r, r/m - register-destination direction of opcode 0x11. */
    IEMOP_MNEMONIC2(RM, ADC, adc, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_adc);
}


/**
 * @opcode 0x14
 * @opgroup op_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 * @opcopytests iemOp_adc_Eb_Gb
 */
FNIEMOP_DEF(iemOp_adc_Al_Ib)
{
    /* ADC AL, imm8. */
    IEMOP_MNEMONIC2(FIXED, ADC, adc, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_adc);
}


/**
 * @opcode 0x15
 * @opgroup op_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 * @opcopytests iemOp_adc_Ev_Gv
 */
FNIEMOP_DEF(iemOp_adc_eAX_Iz)
{
    /* ADC rAX, Iz - immediate sized by the effective operand size. */
    IEMOP_MNEMONIC2(FIXED, ADC, adc, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_adc);
}
412
413
/**
 * @opcode 0x16
 * @opgroup op_stack_sreg
 */
FNIEMOP_DEF(iemOp_push_SS)
{
    /* PUSH SS - invalid in 64-bit mode. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, SS, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_SS);
}


/**
 * @opcode 0x17
 * @opgroup op_stack_sreg
 */
FNIEMOP_DEF(iemOp_pop_SS)
{
    /* POP SS - invalid in 64-bit mode; IRQ inhibition for one instruction is flagged
       via DISOPTYPE_INHIBIT_IRQS.  (The previous doc block wrongly carried arithmetic
       flag tags copy-pasted from an ALU opcode; POP SS does not touch EFLAGS.) */
    IEMOP_MNEMONIC1(FIXED, POP, pop, SS, DISOPTYPE_HARMLESS | DISOPTYPE_INHIBIT_IRQS | DISOPTYPE_INVALID_64 | DISOPTYPE_RRM_DANGEROUS , 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_SS, pVCpu->iem.s.enmEffOpSize);
}
438
439
/**
 * @opcode 0x18
 * @opgroup op_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sbb_Eb_Gb)
{
    /* SBB r/m8, r8 - subtract with borrow-in (see @opfltest cf). */
    IEMOP_MNEMONIC2(MR, SBB, sbb, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sbb);
}


/**
 * @opcode 0x19
 * @opgroup op_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sbb_Ev_Gv)
{
    /* SBB r/m, r. */
    IEMOP_MNEMONIC2(MR, SBB, sbb, Ev, Gv, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sbb);
}


/**
 * @opcode 0x1a
 * @opgroup op_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sbb_Gb_Eb)
{
    /* SBB r8, r/m8 - register-destination direction of opcode 0x18. */
    IEMOP_MNEMONIC2(RM, SBB, sbb, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sbb);
}


/**
 * @opcode 0x1b
 * @opgroup op_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sbb_Gv_Ev)
{
    /* SBB r, r/m - register-destination direction of opcode 0x19. */
    IEMOP_MNEMONIC2(RM, SBB, sbb, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sbb);
}


/**
 * @opcode 0x1c
 * @opgroup op_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sbb_Al_Ib)
{
    /* SBB AL, imm8. */
    IEMOP_MNEMONIC2(FIXED, SBB, sbb, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sbb);
}


/**
 * @opcode 0x1d
 * @opgroup op_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sbb_eAX_Iz)
{
    /* SBB rAX, Iz - immediate sized by the effective operand size. */
    IEMOP_MNEMONIC2(FIXED, SBB, sbb, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sbb);
}
516
517
/**
 * @opcode 0x1e
 * @opgroup op_stack_sreg
 */
FNIEMOP_DEF(iemOp_push_DS)
{
    /* PUSH DS - invalid in 64-bit mode. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, DS, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_DS);
}


/**
 * @opcode 0x1f
 * @opgroup op_stack_sreg
 */
FNIEMOP_DEF(iemOp_pop_DS)
{
    /* POP DS - invalid in 64-bit mode; deferred to the C implementation. */
    IEMOP_MNEMONIC1(FIXED, POP, pop, DS, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_DS, pVCpu->iem.s.enmEffOpSize);
}
541
542
/**
 * @opcode 0x20
 * @opgroup op_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_and_Eb_Gb)
{
    /* AND r/m8, r8.  AF is architecturally undefined after AND. */
    IEMOP_MNEMONIC2(MR, AND, and, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_and);
}


/**
 * @opcode 0x21
 * @opgroup op_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_and_Ev_Gv)
{
    /* AND r/m, r. */
    IEMOP_MNEMONIC2(MR, AND, and, Ev, Gv, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_and);
}


/**
 * @opcode 0x22
 * @opgroup op_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_and_Gb_Eb)
{
    /* AND r8, r/m8 - register-destination direction of opcode 0x20. */
    IEMOP_MNEMONIC2(RM, AND, and, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_and);
}


/**
 * @opcode 0x23
 * @opgroup op_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_and_Gv_Ev)
{
    /* AND r, r/m - register-destination direction of opcode 0x21. */
    IEMOP_MNEMONIC2(RM, AND, and, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_and);
}
601
602
603/**
604 * @opcode 0x24
605 * @opgroup op_gen_arith_bin
606 * @opflmodify cf,pf,af,zf,sf,of
607 * @opflundef af
608 * @opflclear of,cf
609 */
610FNIEMOP_DEF(iemOp_and_Al_Ib)
611{
612 IEMOP_MNEMONIC2(FIXED, AND, and, AL, Ib, DISOPTYPE_HARMLESS, 0);
613 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
614 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_and);
615}
616
617
/**
 * @opcode 0x25
 * @opgroup op_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_and_eAX_Iz)
{
    /* AND rAX, Iz - immediate sized by the effective operand size. */
    IEMOP_MNEMONIC2(FIXED, AND, and, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_and);
}
631
632
/**
 * @opcode 0x26
 * @opmnemonic SEG
 * @op1 ES
 * @opgroup op_prefix
 * @openc prefix
 * @opdisenum OP_SEG
 * @ophints harmless
 */
FNIEMOP_DEF(iemOp_seg_ES)
{
    /* ES segment-override prefix: record the override, then decode and dispatch
       the next opcode byte through the one-byte map. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg es");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_ES;
    pVCpu->iem.s.iEffSeg = X86_SREG_ES;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}


/**
 * @opcode 0x27
 * @opfltest af,cf
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef of
 */
FNIEMOP_DEF(iemOp_daa)
{
    /* DAA - decimal adjust AL; deferred to the C implementation.  OF is
       architecturally undefined afterwards (see @opflundef). */
    IEMOP_MNEMONIC0(FIXED, DAA, daa, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0); /* express implicit AL register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_daa);
}
667
668
/**
 * @opcode 0x28
 * @opgroup op_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sub_Eb_Gb)
{
    /* SUB r/m8, r8. */
    IEMOP_MNEMONIC2(MR, SUB, sub, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sub);
}


/**
 * @opcode 0x29
 * @opgroup op_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sub_Ev_Gv)
{
    /* SUB r/m, r. */
    IEMOP_MNEMONIC2(MR, SUB, sub, Ev, Gv, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sub);
}


/**
 * @opcode 0x2a
 * @opgroup op_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sub_Gb_Eb)
{
    /* SUB r8, r/m8 - register-destination direction of opcode 0x28. */
    IEMOP_MNEMONIC2(RM, SUB, sub, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sub);
}


/**
 * @opcode 0x2b
 * @opgroup op_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sub_Gv_Ev)
{
    /* SUB r, r/m - register-destination direction of opcode 0x29. */
    IEMOP_MNEMONIC2(RM, SUB, sub, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sub);
}


/**
 * @opcode 0x2c
 * @opgroup op_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sub_Al_Ib)
{
    /* SUB AL, imm8. */
    IEMOP_MNEMONIC2(FIXED, SUB, sub, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sub);
}


/**
 * @opcode 0x2d
 * @opgroup op_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sub_eAX_Iz)
{
    /* SUB rAX, Iz - immediate sized by the effective operand size. */
    IEMOP_MNEMONIC2(FIXED, SUB, sub, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sub);
}
739
740
/**
 * @opcode 0x2e
 * @opmnemonic SEG
 * @op1 CS
 * @opgroup op_prefix
 * @openc prefix
 * @opdisenum OP_SEG
 * @ophints harmless
 */
FNIEMOP_DEF(iemOp_seg_CS)
{
    /* CS segment-override prefix: record the override, then decode and dispatch
       the next opcode byte through the one-byte map. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg cs");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_CS;
    pVCpu->iem.s.iEffSeg = X86_SREG_CS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}


/**
 * @opcode 0x2f
 * @opfltest af,cf
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef of
 */
FNIEMOP_DEF(iemOp_das)
{
    /* DAS - decimal adjust AL after subtraction; deferred to the C implementation.
       OF is architecturally undefined afterwards. */
    IEMOP_MNEMONIC0(FIXED, DAS, das, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0); /* express implicit AL register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_das);
}
775
776
/**
 * @opcode 0x30
 * @opgroup op_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_xor_Eb_Gb)
{
    /* XOR r/m8, r8.  AF is architecturally undefined after XOR. */
    IEMOP_MNEMONIC2(MR, XOR, xor, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_xor);
}


/**
 * @opcode 0x31
 * @opgroup op_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_xor_Ev_Gv)
{
    /* XOR r/m, r. */
    IEMOP_MNEMONIC2(MR, XOR, xor, Ev, Gv, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_xor);
}


/**
 * @opcode 0x32
 * @opgroup op_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_xor_Gb_Eb)
{
    /* XOR r8, r/m8 - register-destination direction of opcode 0x30. */
    IEMOP_MNEMONIC2(RM, XOR, xor, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_xor);
}


/**
 * @opcode 0x33
 * @opgroup op_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_xor_Gv_Ev)
{
    /* XOR r, r/m - register-destination direction of opcode 0x31. */
    IEMOP_MNEMONIC2(RM, XOR, xor, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_xor);
}


/**
 * @opcode 0x34
 * @opgroup op_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_xor_Al_Ib)
{
    /* XOR AL, imm8. */
    IEMOP_MNEMONIC2(FIXED, XOR, xor, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_xor);
}
850
851
852/**
853 * @opcode 0x35
854 * @opgroup op_gen_arith_bin
855 * @opflmodify cf,pf,af,zf,sf,of
856 * @opflundef af
857 * @opflclear of,cf
858 */
859FNIEMOP_DEF(iemOp_xor_eAX_Iz)
860{
861 IEMOP_MNEMONIC2(FIXED, XOR, xor, rAX, Iz, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
862 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
863 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_xor);
864}
865
866
/**
 * @opcode 0x36
 */
FNIEMOP_DEF(iemOp_seg_SS)
{
    /* SS segment-override prefix: record the override, then decode and dispatch
       the next opcode byte through the one-byte map. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ss");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_SS;
    pVCpu->iem.s.iEffSeg = X86_SREG_SS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}


/**
 * @opcode 0x37
 */
/* AAA - not implemented yet; FNIEMOP_STUB emits a placeholder handler
   (NOTE(review): exact stub behavior is defined elsewhere in IEM). */
FNIEMOP_STUB(iemOp_aaa);
885
886
/**
 * @opcode 0x38
 */
FNIEMOP_DEF(iemOp_cmp_Eb_Gb)
{
    /* CMP r/m8, r8 - same decode path as SUB, dispatched with the cmp
       implementation table. */
    IEMOP_MNEMONIC(cmp_Eb_Gb, "cmp Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_cmp);
}


/**
 * @opcode 0x39
 */
FNIEMOP_DEF(iemOp_cmp_Ev_Gv)
{
    /* CMP r/m, r. */
    IEMOP_MNEMONIC(cmp_Ev_Gv, "cmp Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_cmp);
}


/**
 * @opcode 0x3a
 */
FNIEMOP_DEF(iemOp_cmp_Gb_Eb)
{
    /* CMP r8, r/m8 - register-first direction of opcode 0x38. */
    IEMOP_MNEMONIC(cmp_Gb_Eb, "cmp Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_cmp);
}


/**
 * @opcode 0x3b
 */
FNIEMOP_DEF(iemOp_cmp_Gv_Ev)
{
    /* CMP r, r/m - register-first direction of opcode 0x39. */
    IEMOP_MNEMONIC(cmp_Gv_Ev, "cmp Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_cmp);
}


/**
 * @opcode 0x3c
 */
FNIEMOP_DEF(iemOp_cmp_Al_Ib)
{
    /* CMP AL, imm8. */
    IEMOP_MNEMONIC(cmp_al_Ib, "cmp al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_cmp);
}


/**
 * @opcode 0x3d
 */
FNIEMOP_DEF(iemOp_cmp_eAX_Iz)
{
    /* CMP rAX, Iz - immediate sized by the effective operand size. */
    IEMOP_MNEMONIC(cmp_rAX_Iz, "cmp rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_cmp);
}


/**
 * @opcode 0x3e
 */
FNIEMOP_DEF(iemOp_seg_DS)
{
    /* DS segment-override prefix: record the override, then decode and dispatch
       the next opcode byte through the one-byte map. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ds");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_DS;
    pVCpu->iem.s.iEffSeg = X86_SREG_DS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}


/**
 * @opcode 0x3f
 */
/* AAS - not implemented yet (stub, like AAA above). */
FNIEMOP_STUB(iemOp_aas);
965
/**
 * Common 'inc/dec/not/neg register' helper.
 *
 * Applies the unary operation described by @a pImpl to general register
 * @a iReg, sized by the current effective operand size.  EFLAGS are passed
 * by reference to the assembly worker so it can update them.
 *
 * @param   pImpl   Table of size-specific unary workers (pfnNormalU16/32/64).
 * @param   iReg    The general register index to operate on.
 */
FNIEMOP_DEF_2(iemOpCommonUnaryGReg, PCIEMOPUNARYSIZES, pImpl, uint8_t, iReg)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint16_t *, pu16Dst, 0);
            IEM_MC_ARG(uint32_t *, pEFlags, 1);
            IEM_MC_REF_GREG_U16(pu16Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
            IEM_MC_ARG(uint32_t *, pEFlags, 1);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
            /* 32-bit writes clear the upper half of the 64-bit register. */
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint64_t *, pu64Dst, 0);
            IEM_MC_ARG(uint32_t *, pEFlags, 1);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
    }
    /* Unreachable: all IEMMODE values are handled above; keeps compilers quiet. */
    return VINF_SUCCESS;
}
1010
1011
/**
 * @opcode 0x40
 */
FNIEMOP_DEF(iemOp_inc_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX;

        /* Decode and dispatch the real opcode following the prefix. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eAX, "inc eAX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xAX);
}


/**
 * @opcode 0x41
 */
FNIEMOP_DEF(iemOp_inc_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.b");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexB = 1 << 3; /* REX.B adds 8 to the base/rm register id. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eCX, "inc eCX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xCX);
}


/**
 * @opcode 0x42
 */
FNIEMOP_DEF(iemOp_inc_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.x");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexIndex = 1 << 3; /* REX.X adds 8 to the SIB index register id. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eDX, "inc eDX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDX);
}



/**
 * @opcode 0x43
 */
FNIEMOP_DEF(iemOp_inc_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexB     = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eBX, "inc eBX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBX);
}
1103
1104
/**
 * @opcode 0x44
 */
FNIEMOP_DEF(iemOp_inc_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.r");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R;
        pVCpu->iem.s.uRexReg = 1 << 3; /* REX.R adds 8 to the ModRM.reg register id. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eSP, "inc eSP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSP);
}


/**
 * @opcode 0x45
 */
FNIEMOP_DEF(iemOp_inc_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rb");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexB   = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eBP, "inc eBP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBP);
}


/**
 * @opcode 0x46
 */
FNIEMOP_DEF(iemOp_inc_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eSI, "inc eSI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSI);
}


/**
 * @opcode 0x47
 */
FNIEMOP_DEF(iemOp_inc_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexB     = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eDI, "inc eDI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDI);
}
1199
1200
1201/**
1202 * @opcode 0x48
1203 */
1204FNIEMOP_DEF(iemOp_dec_eAX)
1205{
1206 /*
1207 * This is a REX prefix in 64-bit mode.
1208 */
1209 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1210 {
1211 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.w");
1212 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_SIZE_REX_W;
1213 iemRecalEffOpSize(pVCpu);
1214
1215 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1216 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1217 }
1218
1219 IEMOP_MNEMONIC(dec_eAX, "dec eAX");
1220 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xAX);
1221}
1222
1223
1224/**
1225 * @opcode 0x49
1226 */
1227FNIEMOP_DEF(iemOp_dec_eCX)
1228{
1229 /*
1230 * This is a REX prefix in 64-bit mode.
1231 */
1232 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1233 {
1234 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bw");
1235 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
1236 pVCpu->iem.s.uRexB = 1 << 3;
1237 iemRecalEffOpSize(pVCpu);
1238
1239 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1240 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1241 }
1242
1243 IEMOP_MNEMONIC(dec_eCX, "dec eCX");
1244 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xCX);
1245}
1246
1247
1248/**
1249 * @opcode 0x4a
1250 */
1251FNIEMOP_DEF(iemOp_dec_eDX)
1252{
1253 /*
1254 * This is a REX prefix in 64-bit mode.
1255 */
1256 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1257 {
1258 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.xw");
1259 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
1260 pVCpu->iem.s.uRexIndex = 1 << 3;
1261 iemRecalEffOpSize(pVCpu);
1262
1263 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1264 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1265 }
1266
1267 IEMOP_MNEMONIC(dec_eDX, "dec eDX");
1268 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDX);
1269}
1270
1271
1272/**
1273 * @opcode 0x4b
1274 */
1275FNIEMOP_DEF(iemOp_dec_eBX)
1276{
1277 /*
1278 * This is a REX prefix in 64-bit mode.
1279 */
1280 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1281 {
1282 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bxw");
1283 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
1284 pVCpu->iem.s.uRexB = 1 << 3;
1285 pVCpu->iem.s.uRexIndex = 1 << 3;
1286 iemRecalEffOpSize(pVCpu);
1287
1288 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1289 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1290 }
1291
1292 IEMOP_MNEMONIC(dec_eBX, "dec eBX");
1293 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBX);
1294}
1295
1296
1297/**
1298 * @opcode 0x4c
1299 */
1300FNIEMOP_DEF(iemOp_dec_eSP)
1301{
1302 /*
1303 * This is a REX prefix in 64-bit mode.
1304 */
1305 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1306 {
1307 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rw");
1308 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_SIZE_REX_W;
1309 pVCpu->iem.s.uRexReg = 1 << 3;
1310 iemRecalEffOpSize(pVCpu);
1311
1312 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1313 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1314 }
1315
1316 IEMOP_MNEMONIC(dec_eSP, "dec eSP");
1317 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSP);
1318}
1319
1320
1321/**
1322 * @opcode 0x4d
1323 */
1324FNIEMOP_DEF(iemOp_dec_eBP)
1325{
1326 /*
1327 * This is a REX prefix in 64-bit mode.
1328 */
1329 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1330 {
1331 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbw");
1332 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
1333 pVCpu->iem.s.uRexReg = 1 << 3;
1334 pVCpu->iem.s.uRexB = 1 << 3;
1335 iemRecalEffOpSize(pVCpu);
1336
1337 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1338 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1339 }
1340
1341 IEMOP_MNEMONIC(dec_eBP, "dec eBP");
1342 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBP);
1343}
1344
1345
1346/**
1347 * @opcode 0x4e
1348 */
1349FNIEMOP_DEF(iemOp_dec_eSI)
1350{
1351 /*
1352 * This is a REX prefix in 64-bit mode.
1353 */
1354 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1355 {
1356 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rxw");
1357 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
1358 pVCpu->iem.s.uRexReg = 1 << 3;
1359 pVCpu->iem.s.uRexIndex = 1 << 3;
1360 iemRecalEffOpSize(pVCpu);
1361
1362 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1363 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1364 }
1365
1366 IEMOP_MNEMONIC(dec_eSI, "dec eSI");
1367 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSI);
1368}
1369
1370
1371/**
1372 * @opcode 0x4f
1373 */
1374FNIEMOP_DEF(iemOp_dec_eDI)
1375{
1376 /*
1377 * This is a REX prefix in 64-bit mode.
1378 */
1379 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1380 {
1381 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbxw");
1382 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
1383 pVCpu->iem.s.uRexReg = 1 << 3;
1384 pVCpu->iem.s.uRexB = 1 << 3;
1385 pVCpu->iem.s.uRexIndex = 1 << 3;
1386 iemRecalEffOpSize(pVCpu);
1387
1388 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1389 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1390 }
1391
1392 IEMOP_MNEMONIC(dec_eDI, "dec eDI");
1393 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDI);
1394}
1395
1396
/**
 * Common 'push register' helper.
 *
 * Pushes the general purpose register @a iReg (combined with REX.B in
 * 64-bit mode) onto the stack, using the current effective operand size.
 *
 * @param   iReg    The register index (X86_GREG_XXX, low three bits).
 */
FNIEMOP_DEF_1(iemOpCommonPushGReg, uint8_t, iReg)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        iReg |= pVCpu->iem.s.uRexB;
        /* In 64-bit mode push defaults to a 64-bit operand; the 66h prefix
           selects a 16-bit push (there is no 32-bit push in long mode). */
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Value);
            IEM_MC_FETCH_GREG_U16(u16Value, iReg);
            IEM_MC_PUSH_U16(u16Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U32(u32Value, iReg);
            IEM_MC_PUSH_U32(u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U64(u64Value, iReg);
            IEM_MC_PUSH_U64(u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
1442
1443
/**
 * @opcode 0x50
 */
FNIEMOP_DEF(iemOp_push_eAX)
{
    IEMOP_MNEMONIC(push_rAX, "push rAX");
    /* Defer to the common push-register worker; REX.B is applied there. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xAX);
}
1452
1453
/**
 * @opcode 0x51
 */
FNIEMOP_DEF(iemOp_push_eCX)
{
    IEMOP_MNEMONIC(push_rCX, "push rCX");
    /* Defer to the common push-register worker; REX.B is applied there. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xCX);
}
1462
1463
/**
 * @opcode 0x52
 */
FNIEMOP_DEF(iemOp_push_eDX)
{
    IEMOP_MNEMONIC(push_rDX, "push rDX");
    /* Defer to the common push-register worker; REX.B is applied there. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDX);
}
1472
1473
/**
 * @opcode 0x53
 */
FNIEMOP_DEF(iemOp_push_eBX)
{
    IEMOP_MNEMONIC(push_rBX, "push rBX");
    /* Defer to the common push-register worker; REX.B is applied there. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBX);
}
1482
1483
/**
 * @opcode 0x54
 */
FNIEMOP_DEF(iemOp_push_eSP)
{
    IEMOP_MNEMONIC(push_rSP, "push rSP");
    if (IEM_GET_TARGET_CPU(pVCpu) == IEMTARGETCPU_8086)
    {
        /* 8086/8088 quirk: PUSH SP stores the value of SP *after* the
           decrement, hence the explicit subtract-by-2 before the push.
           NOTE(review): the MC block appears to return via
           IEM_MC_ADVANCE_RIP() so the common path below isn't also taken
           on 8086 -- confirm against the IEM_MC_* macro definitions. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint16_t, u16Value);
        IEM_MC_FETCH_GREG_U16(u16Value, X86_GREG_xSP);
        IEM_MC_SUB_LOCAL_U16(u16Value, 2);
        IEM_MC_PUSH_U16(u16Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSP);
}
1502
1503
/**
 * @opcode 0x55
 */
FNIEMOP_DEF(iemOp_push_eBP)
{
    IEMOP_MNEMONIC(push_rBP, "push rBP");
    /* Defer to the common push-register worker; REX.B is applied there. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBP);
}
1512
1513
/**
 * @opcode 0x56
 */
FNIEMOP_DEF(iemOp_push_eSI)
{
    IEMOP_MNEMONIC(push_rSI, "push rSI");
    /* Defer to the common push-register worker; REX.B is applied there. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSI);
}
1522
1523
/**
 * @opcode 0x57
 */
FNIEMOP_DEF(iemOp_push_eDI)
{
    IEMOP_MNEMONIC(push_rDI, "push rDI");
    /* Defer to the common push-register worker; REX.B is applied there. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDI);
}
1532
1533
/**
 * Common 'pop register' helper.
 *
 * Pops the top of stack into the general purpose register @a iReg
 * (combined with REX.B in 64-bit mode), using the current effective
 * operand size.  POP rSP itself has extra quirks and is handled in
 * iemOp_pop_eSP, which only defers here for the REX.B (r12) case.
 *
 * @param   iReg    The register index (X86_GREG_XXX, low three bits).
 */
FNIEMOP_DEF_1(iemOpCommonPopGReg, uint8_t, iReg)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        iReg |= pVCpu->iem.s.uRexB;
        /* In 64-bit mode pop defaults to a 64-bit operand; the 66h prefix
           selects a 16-bit pop (there is no 32-bit pop in long mode). */
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t *, pu16Dst);
            IEM_MC_REF_GREG_U16(pu16Dst, iReg);
            IEM_MC_POP_U16(pu16Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t *, pu32Dst);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            IEM_MC_POP_U32(pu32Dst);
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /** @todo testcase*/
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t *, pu64Dst);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_POP_U64(pu64Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
1580
1581
/**
 * @opcode 0x58
 */
FNIEMOP_DEF(iemOp_pop_eAX)
{
    IEMOP_MNEMONIC(pop_rAX, "pop rAX");
    /* Defer to the common pop-register worker; REX.B is applied there. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xAX);
}
1590
1591
/**
 * @opcode 0x59
 */
FNIEMOP_DEF(iemOp_pop_eCX)
{
    IEMOP_MNEMONIC(pop_rCX, "pop rCX");
    /* Defer to the common pop-register worker; REX.B is applied there. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xCX);
}
1600
1601
/**
 * @opcode 0x5a
 */
FNIEMOP_DEF(iemOp_pop_eDX)
{
    IEMOP_MNEMONIC(pop_rDX, "pop rDX");
    /* Defer to the common pop-register worker; REX.B is applied there. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDX);
}
1610
1611
/**
 * @opcode 0x5b
 */
FNIEMOP_DEF(iemOp_pop_eBX)
{
    IEMOP_MNEMONIC(pop_rBX, "pop rBX");
    /* Defer to the common pop-register worker; REX.B is applied there. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBX);
}
1620
1621
/**
 * @opcode 0x5c
 *
 * POP rSP is special: the destination is the stack pointer itself, so the
 * value is popped into a local first and then stored, rather than popping
 * through a register reference like the common worker does.
 */
FNIEMOP_DEF(iemOp_pop_eSP)
{
    IEMOP_MNEMONIC(pop_rSP, "pop rSP");
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        /* With REX.B this is really 'pop r12' and has no special quirks. */
        if (pVCpu->iem.s.uRexB)
            return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSP);
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
                           DISOPTYPE_HARMLESS | DISOPTYPE_DEFAULT_64_OP_SIZE | DISOPTYPE_REXB_EXTENDS_OPREG);
    /** @todo add testcase for this instruction. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Dst);
            IEM_MC_POP_U16(&u16Dst); /** @todo not correct MC, fix later. */
            IEM_MC_STORE_GREG_U16(X86_GREG_xSP, u16Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Dst);
            IEM_MC_POP_U32(&u32Dst);
            IEM_MC_STORE_GREG_U32(X86_GREG_xSP, u32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Dst);
            IEM_MC_POP_U64(&u64Dst);
            IEM_MC_STORE_GREG_U64(X86_GREG_xSP, u64Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
1671
1672
/**
 * @opcode 0x5d
 */
FNIEMOP_DEF(iemOp_pop_eBP)
{
    IEMOP_MNEMONIC(pop_rBP, "pop rBP");
    /* Defer to the common pop-register worker; REX.B is applied there. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBP);
}
1681
1682
/**
 * @opcode 0x5e
 */
FNIEMOP_DEF(iemOp_pop_eSI)
{
    IEMOP_MNEMONIC(pop_rSI, "pop rSI");
    /* Defer to the common pop-register worker; REX.B is applied there. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSI);
}
1691
1692
/**
 * @opcode 0x5f
 */
FNIEMOP_DEF(iemOp_pop_eDI)
{
    IEMOP_MNEMONIC(pop_rDI, "pop rDI");
    /* Defer to the common pop-register worker; REX.B is applied there. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDI);
}
1701
1702
1703/**
1704 * @opcode 0x60
1705 */
1706FNIEMOP_DEF(iemOp_pusha)
1707{
1708 IEMOP_MNEMONIC(pusha, "pusha");
1709 IEMOP_HLP_MIN_186();
1710 IEMOP_HLP_NO_64BIT();
1711 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
1712 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_16);
1713 Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
1714 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_32);
1715}
1716
1717
1718/**
1719 * @opcode 0x61
1720 */
1721FNIEMOP_DEF(iemOp_popa__mvex)
1722{
1723 if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
1724 {
1725 IEMOP_MNEMONIC(popa, "popa");
1726 IEMOP_HLP_MIN_186();
1727 IEMOP_HLP_NO_64BIT();
1728 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
1729 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_16);
1730 Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
1731 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_32);
1732 }
1733 IEMOP_MNEMONIC(mvex, "mvex");
1734 Log(("mvex prefix is not supported!\n"));
1735 return IEMOP_RAISE_INVALID_OPCODE();
1736}
1737
1738
/**
 * @opcode 0x62
 * @opmnemonic bound
 * @op1 Gv
 * @op2 Ma
 * @opmincpu 80186
 * @ophints harmless invalid_64
 *
 * @note Decoder stub: BOUND (and the EVEX reuse of 0x62) is not implemented
 *       here yet; behaviour is whatever FNIEMOP_STUB provides (presumably a
 *       not-implemented failure -- confirm against the stub macro).
 */
FNIEMOP_STUB(iemOp_bound_Gv_Ma__evex);
// IEMOP_HLP_MIN_186();
1750
/** Opcode 0x63 - non-64-bit modes.
 *
 * ARPL Ew,Gw: adjust the RPL field of the destination selector; requires
 * protected mode (no real or V86 mode, min 286).  The worker
 * iemAImpl_arpl updates destination and EFLAGS.
 */
FNIEMOP_DEF(iemOp_arpl_Ew_Gw)
{
    IEMOP_MNEMONIC(arpl_Ew_Gw, "arpl Ew,Gw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register destination: reference it directly. */
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint16_t *,      pu16Dst,    0);
        IEM_MC_ARG(uint16_t,        u16Src,     1);
        IEM_MC_ARG(uint32_t *,      pEFlags,    2);

        IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* Memory destination: map it read-write and commit EFLAGS after
           the worker has run. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *, pu16Dst,          0);
        IEM_MC_ARG(uint16_t,   u16Src,           1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;

}
1800
1801
/**
 * @opcode 0x63
 *
 * MOVSXD Gv,Ev: sign-extend a 32-bit source into a 64-bit destination.
 * Only reached with a 64-bit effective operand size (see the assertion).
 *
 * @note This is a weird one. It works like a regular move instruction if
 *       REX.W isn't set, at least according to AMD docs (rev 3.15, 2009-11).
 * @todo This definitely needs a testcase to verify the odd cases. */
FNIEMOP_DEF(iemOp_movsxd_Gv_Ev)
{
    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT); /* Caller branched already . */

    IEMOP_MNEMONIC(movsxd_Gv_Ev, "movsxd Gv,Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register to register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, u64Value);
        IEM_MC_FETCH_GREG_U32_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, u64Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U32_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1845
1846
1847/**
1848 * @opcode 0x64
1849 * @opmnemonic segfs
1850 * @opmincpu 80386
1851 * @opgroup op_prefixes
1852 */
1853FNIEMOP_DEF(iemOp_seg_FS)
1854{
1855 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg fs");
1856 IEMOP_HLP_MIN_386();
1857
1858 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_FS;
1859 pVCpu->iem.s.iEffSeg = X86_SREG_FS;
1860
1861 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1862 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1863}
1864
1865
1866/**
1867 * @opcode 0x65
1868 * @opmnemonic seggs
1869 * @opmincpu 80386
1870 * @opgroup op_prefixes
1871 */
1872FNIEMOP_DEF(iemOp_seg_GS)
1873{
1874 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg gs");
1875 IEMOP_HLP_MIN_386();
1876
1877 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_GS;
1878 pVCpu->iem.s.iEffSeg = X86_SREG_GS;
1879
1880 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1881 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1882}
1883
1884
/**
 * @opcode 0x66
 * @opmnemonic opsize
 * @openc prefix
 * @opmincpu 80386
 * @ophints harmless
 * @opgroup op_prefixes
 */
FNIEMOP_DEF(iemOp_op_size)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("op size");
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_OP;
    iemRecalEffOpSize(pVCpu);

    /* For the 4 entry opcode tables, the operand prefix doesn't count
       when REPZ or REPNZ are present. */
    if (pVCpu->iem.s.idxPrefix == 0)
        pVCpu->iem.s.idxPrefix = 1;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1909
1910
1911/**
1912 * @opcode 0x67
1913 * @opmnemonic addrsize
1914 * @openc prefix
1915 * @opmincpu 80386
1916 * @ophints harmless
1917 * @opgroup op_prefixes
1918 */
1919FNIEMOP_DEF(iemOp_addr_size)
1920{
1921 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("addr size");
1922 IEMOP_HLP_MIN_386();
1923
1924 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_ADDR;
1925 switch (pVCpu->iem.s.enmDefAddrMode)
1926 {
1927 case IEMMODE_16BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
1928 case IEMMODE_32BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_16BIT; break;
1929 case IEMMODE_64BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
1930 default: AssertFailed();
1931 }
1932
1933 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1934 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1935}
1936
1937
/**
 * @opcode 0x68
 *
 * PUSH Iz: push an immediate whose size follows the effective operand size;
 * in 64-bit mode a 32-bit immediate is fetched and sign-extended to 64 bits.
 */
FNIEMOP_DEF(iemOp_push_Iz)
{
    IEMOP_MNEMONIC(push_Iz, "push Iz");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U16(u16Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U32(u32Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            /* Sign-extended 32-bit immediate. */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U64(u64Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
1984
1985
/**
 * @opcode 0x69
 *
 * Three-operand signed multiply: Gv = Ev * Iz (immediate of operand size;
 * sign-extended 32-bit in 64-bit mode).  SF/ZF/AF/PF are left undefined
 * (see the verification mask below); the worker computes CF/OF.
 */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Iz)
{
    IEMOP_MNEMONIC(imul_Gv_Ev_Iz, "imul Gv,Ev,Iz"); /* Gv = Ev * Iz; */
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *,      pu16Dst,            0);
                IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/ u16Imm,1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_LOCAL(uint16_t,      u16Tmp);

                IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand; the '2' below tells the effective-address
                   decoder that a 2-byte immediate follows. */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,      pu16Dst,    0);
                IEM_MC_ARG(uint16_t,        u16Src,     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,    2);
                IEM_MC_LOCAL(uint16_t,      u16Tmp);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *,      pu32Dst,            0);
                IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/ u32Imm,1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_LOCAL(uint32_t,      u32Tmp);

                IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand; a 4-byte immediate follows the ModR/M. */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,      pu32Dst,    0);
                IEM_MC_ARG(uint32_t,        u32Src,     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,    2);
                IEM_MC_LOCAL(uint32_t,      u32Tmp);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand; immediate is sign-extended from 32 bits. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *,      pu64Dst,            0);
                IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/ u64Imm,1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_LOCAL(uint64_t,      u64Tmp);

                IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand; a 4-byte immediate follows the ModR/M. */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,      pu64Dst,    0);
                IEM_MC_ARG(uint64_t,        u64Src,     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,    2);
                IEM_MC_LOCAL(uint64_t,      u64Tmp);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }
    }
    AssertFailedReturn(VERR_IEM_IPE_9);
}
2147
2148
/**
 * @opcode 0x6a
 *
 * PUSH Ib: push a sign-extended byte immediate at the effective operand size.
 */
FNIEMOP_DEF(iemOp_push_Ib)
{
    IEMOP_MNEMONIC(push_Ib, "push Ib");
    IEMOP_HLP_MIN_186();
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0,0);
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_PUSH_U16(i8Imm);
            break;
        case IEMMODE_32BIT:
            IEM_MC_PUSH_U32(i8Imm);
            break;
        case IEMMODE_64BIT:
            IEM_MC_PUSH_U64(i8Imm);
            break;
    }
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2177
2178
/**
 * @opcode 0x6b
 *
 * Three-operand signed multiply with a sign-extended byte immediate:
 * Gv = Ev * Ib.  SF/ZF/AF/PF are left undefined (see the verification
 * mask below); the worker computes CF/OF.
 */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Ib)
{
    IEMOP_MNEMONIC(imul_Gv_Ev_Ib, "imul Gv,Ev,Ib"); /* Gv = Ev * Ib; */
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand; byte immediate sign-extended to 16 bits. */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                    0);
                IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);
                IEM_MC_LOCAL(uint16_t,      u16Tmp);

                IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand; the '1' below tells the effective-address
                   decoder that a 1-byte immediate follows. */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,      pu16Dst,    0);
                IEM_MC_ARG(uint16_t,        u16Src,     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,    2);
                IEM_MC_LOCAL(uint16_t,      u16Tmp);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_S8_SX_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand; byte immediate sign-extended to 32 bits. */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                    0);
                IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);
                IEM_MC_LOCAL(uint32_t,      u32Tmp);

                IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand; a 1-byte immediate follows the ModR/M. */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,      pu32Dst,    0);
                IEM_MC_ARG(uint32_t,        u32Src,     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,    2);
                IEM_MC_LOCAL(uint32_t,      u32Tmp);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_S8_SX_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand; byte immediate sign-extended to 64 bits. */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                    0);
                IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);
                IEM_MC_LOCAL(uint64_t,      u64Tmp);

                IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand; a 1-byte immediate follows the ModR/M. */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,      pu64Dst,    0);
                IEM_MC_ARG(uint64_t,        u64Src,     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,    2);
                IEM_MC_LOCAL(uint64_t,      u64Tmp);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S8_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
    }
    AssertFailedReturn(VERR_IEM_IPE_8);
}
2334
2335
/**
 * @opcode 0x6c
 * INS/INSB - input byte string from port DX; 186 and later only.
 * Dispatches to a C implementation selected by REP prefix and effective
 * address size.
 */
FNIEMOP_DEF(iemOp_insb_Yb_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* REPNZ and REPZ are treated identically here (both select the REP form). */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_insb_Yb_DX, "rep ins Yb,DX");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            /* The 'false' argument presumably means "I/O permission not yet
               checked" - confirm against the iemCImpl_rep_ins_* signatures. */
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr16, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr32, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(ins_Yb_DX, "ins Yb,DX");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr16, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr32, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
2366
2367
/**
 * @opcode 0x6d
 * INS/INSW/INSD - input word/dword string from port DX; 186 and later only.
 * Dispatches on REP prefix, effective operand size and effective address
 * size.  64-bit operand size is handled by the 32-bit implementation
 * (there is no 64-bit INS operand form).
 */
FNIEMOP_DEF(iemOp_inswd_Yv_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC(rep_ins_Yv_DX, "rep ins Yv,DX");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reached: all inner cases return */
            case IEMMODE_64BIT:
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reached */
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(ins_Yv_DX, "ins Yv,DX");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reached */
            case IEMMODE_64BIT:
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reached */
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    /* Not reached: every case in the switches above returns. */
}
2430
2431
/**
 * @opcode 0x6e
 * OUTS/OUTSB - output byte string to port DX; 186 and later only.
 * The effective segment (DS by default, overridable) is passed through to
 * the C implementation; dispatch is on REP prefix and address size.
 */
FNIEMOP_DEF(iemOp_outsb_Yb_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_outsb_DX_Yb, "rep outs DX,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            /* The trailing 'false' presumably means "I/O permission not yet
               checked" - confirm against the iemCImpl_rep_outs_* signatures. */
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(outs_DX_Yb, "outs DX,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
2462
2463
/**
 * @opcode 0x6f
 * OUTS/OUTSW/OUTSD - output word/dword string to port DX; 186 and later
 * only.  Dispatches on REP prefix, operand size and address size; 64-bit
 * operand size shares the 32-bit implementation.
 */
FNIEMOP_DEF(iemOp_outswd_Yv_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC(rep_outs_DX_Yv, "rep outs DX,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reached: all inner cases return */
            case IEMMODE_64BIT:
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reached */
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(outs_DX_Yv, "outs DX,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reached */
            case IEMMODE_64BIT:
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reached */
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    /* Not reached: every case in the switches above returns. */
}
2526
2527
/**
 * @opcode 0x70
 * JO rel8 - jump short if overflow (OF=1).
 */
FNIEMOP_DEF(iemOp_jo_Jb)
{
    IEMOP_MNEMONIC(jo_Jb, "jo Jb");
    /* Fetch the signed 8-bit displacement before committing the decode. */
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* operand size defaults to 64-bit in 64-bit mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2547
2548
/**
 * @opcode 0x71
 * JNO rel8 - jump short if not overflow (OF=0).
 * Same test as JO with the taken/not-taken arms swapped.
 */
FNIEMOP_DEF(iemOp_jno_Jb)
{
    IEMOP_MNEMONIC(jno_Jb, "jno Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();   /* OF set: fall through */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2568
/**
 * @opcode 0x72
 * JC/JB/JNAE rel8 - jump short if carry (CF=1).
 */
FNIEMOP_DEF(iemOp_jc_Jb)
{
    IEMOP_MNEMONIC(jc_Jb, "jc/jnae Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2588
2589
/**
 * @opcode 0x73
 * JNC/JNB/JAE rel8 - jump short if not carry (CF=0).
 */
FNIEMOP_DEF(iemOp_jnc_Jb)
{
    IEMOP_MNEMONIC(jnc_Jb, "jnc/jnb Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_ADVANCE_RIP();   /* CF set: fall through */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2609
2610
/**
 * @opcode 0x74
 * JE/JZ rel8 - jump short if equal/zero (ZF=1).
 */
FNIEMOP_DEF(iemOp_je_Jb)
{
    IEMOP_MNEMONIC(je_Jb, "je/jz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2630
2631
/**
 * @opcode 0x75
 * JNE/JNZ rel8 - jump short if not equal/not zero (ZF=0).
 */
FNIEMOP_DEF(iemOp_jne_Jb)
{
    IEMOP_MNEMONIC(jne_Jb, "jne/jnz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP();   /* ZF set: fall through */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2651
2652
/**
 * @opcode 0x76
 * JBE/JNA rel8 - jump short if below or equal (CF=1 or ZF=1).
 */
FNIEMOP_DEF(iemOp_jbe_Jb)
{
    IEMOP_MNEMONIC(jbe_Jb, "jbe/jna Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2672
2673
/**
 * @opcode 0x77
 * JA/JNBE rel8 - jump short if above (CF=0 and ZF=0).
 */
FNIEMOP_DEF(iemOp_jnbe_Jb)
{
    IEMOP_MNEMONIC(ja_Jb, "ja/jnbe Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP();   /* CF or ZF set: fall through */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2693
2694
/**
 * @opcode 0x78
 * JS rel8 - jump short if sign (SF=1).
 */
FNIEMOP_DEF(iemOp_js_Jb)
{
    IEMOP_MNEMONIC(js_Jb, "js Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2714
2715
/**
 * @opcode 0x79
 * JNS rel8 - jump short if not sign (SF=0).
 */
FNIEMOP_DEF(iemOp_jns_Jb)
{
    IEMOP_MNEMONIC(jns_Jb, "jns Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_ADVANCE_RIP();   /* SF set: fall through */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2735
2736
/**
 * @opcode 0x7a
 * JP/JPE rel8 - jump short if parity even (PF=1).
 */
FNIEMOP_DEF(iemOp_jp_Jb)
{
    IEMOP_MNEMONIC(jp_Jb, "jp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2756
2757
/**
 * @opcode 0x7b
 * JNP/JPO rel8 - jump short if parity odd (PF=0).
 */
FNIEMOP_DEF(iemOp_jnp_Jb)
{
    IEMOP_MNEMONIC(jnp_Jb, "jnp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_ADVANCE_RIP();   /* PF set: fall through */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2777
2778
/**
 * @opcode 0x7c
 * JL/JNGE rel8 - jump short if less (SF != OF).
 */
FNIEMOP_DEF(iemOp_jl_Jb)
{
    IEMOP_MNEMONIC(jl_Jb, "jl/jnge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2798
2799
/**
 * @opcode 0x7d
 * JNL/JGE rel8 - jump short if greater or equal (SF == OF).
 */
FNIEMOP_DEF(iemOp_jnl_Jb)
{
    IEMOP_MNEMONIC(jge_Jb, "jnl/jge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();   /* SF != OF: fall through */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2819
2820
/**
 * @opcode 0x7e
 * JLE/JNG rel8 - jump short if less or equal (ZF=1 or SF != OF).
 */
FNIEMOP_DEF(iemOp_jle_Jb)
{
    IEMOP_MNEMONIC(jle_Jb, "jle/jng Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2840
2841
/**
 * @opcode 0x7f
 * JG/JNLE rel8 - jump short if greater (ZF=0 and SF == OF).
 */
FNIEMOP_DEF(iemOp_jnle_Jb)
{
    IEMOP_MNEMONIC(jg_Jb, "jnle/jg Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();   /* ZF set or SF != OF: fall through */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2861
2862
/**
 * @opcode 0x80
 * Immediate group 1, byte form: ADD/OR/ADC/SBB/AND/SUB/XOR/CMP Eb,Ib.
 * The ModR/M reg field selects the operation; the 8-bit immediate is the
 * source operand.
 */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_80)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: IEMOP_MNEMONIC(add_Eb_Ib, "add Eb,Ib"); break;
        case 1: IEMOP_MNEMONIC(or_Eb_Ib, "or Eb,Ib"); break;
        case 2: IEMOP_MNEMONIC(adc_Eb_Ib, "adc Eb,Ib"); break;
        case 3: IEMOP_MNEMONIC(sbb_Eb_Ib, "sbb Eb,Ib"); break;
        case 4: IEMOP_MNEMONIC(and_Eb_Ib, "and Eb,Ib"); break;
        case 5: IEMOP_MNEMONIC(sub_Eb_Ib, "sub Eb,Ib"); break;
        case 6: IEMOP_MNEMONIC(xor_Eb_Ib, "xor Eb,Ib"); break;
        case 7: IEMOP_MNEMONIC(cmp_Eb_Ib, "cmp Eb,Ib"); break;
    }
    /* The reg field indexes the worker table shared by all group 1 encodings. */
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        /* Only CMP has no locked worker; it just reads its destination. */
        uint32_t fAccess;
        if (pImpl->pfnLockedU8)
            fAccess = IEM_ACCESS_DATA_RW;
        else /* CMP */
            fAccess = IEM_ACCESS_DATA_R;
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        /* 1 = one remaining opcode byte (the immediate) after the effective address. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);
        /* LOCK is only valid together with a write-back operation. */
        if (pImpl->pfnLockedU8)
            IEMOP_HLP_DONE_DECODING();
        else
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_MEM_MAP(pu8Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2934
2935
2936/**
2937 * @opcode 0x81
2938 */
2939FNIEMOP_DEF(iemOp_Grp1_Ev_Iz)
2940{
2941 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2942 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
2943 {
2944 case 0: IEMOP_MNEMONIC(add_Ev_Iz, "add Ev,Iz"); break;
2945 case 1: IEMOP_MNEMONIC(or_Ev_Iz, "or Ev,Iz"); break;
2946 case 2: IEMOP_MNEMONIC(adc_Ev_Iz, "adc Ev,Iz"); break;
2947 case 3: IEMOP_MNEMONIC(sbb_Ev_Iz, "sbb Ev,Iz"); break;
2948 case 4: IEMOP_MNEMONIC(and_Ev_Iz, "and Ev,Iz"); break;
2949 case 5: IEMOP_MNEMONIC(sub_Ev_Iz, "sub Ev,Iz"); break;
2950 case 6: IEMOP_MNEMONIC(xor_Ev_Iz, "xor Ev,Iz"); break;
2951 case 7: IEMOP_MNEMONIC(cmp_Ev_Iz, "cmp Ev,Iz"); break;
2952 }
2953 PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];
2954
2955 switch (pVCpu->iem.s.enmEffOpSize)
2956 {
2957 case IEMMODE_16BIT:
2958 {
2959 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2960 {
2961 /* register target */
2962 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
2963 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2964 IEM_MC_BEGIN(3, 0);
2965 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
2966 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u16Imm, 1);
2967 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2968
2969 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2970 IEM_MC_REF_EFLAGS(pEFlags);
2971 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
2972
2973 IEM_MC_ADVANCE_RIP();
2974 IEM_MC_END();
2975 }
2976 else
2977 {
2978 /* memory target */
2979 uint32_t fAccess;
2980 if (pImpl->pfnLockedU16)
2981 fAccess = IEM_ACCESS_DATA_RW;
2982 else /* CMP, TEST */
2983 fAccess = IEM_ACCESS_DATA_R;
2984 IEM_MC_BEGIN(3, 2);
2985 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
2986 IEM_MC_ARG(uint16_t, u16Src, 1);
2987 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
2988 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2989
2990 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
2991 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
2992 IEM_MC_ASSIGN(u16Src, u16Imm);
2993 if (pImpl->pfnLockedU16)
2994 IEMOP_HLP_DONE_DECODING();
2995 else
2996 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2997 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
2998 IEM_MC_FETCH_EFLAGS(EFlags);
2999 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
3000 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
3001 else
3002 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
3003
3004 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
3005 IEM_MC_COMMIT_EFLAGS(EFlags);
3006 IEM_MC_ADVANCE_RIP();
3007 IEM_MC_END();
3008 }
3009 break;
3010 }
3011
3012 case IEMMODE_32BIT:
3013 {
3014 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3015 {
3016 /* register target */
3017 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
3018 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3019 IEM_MC_BEGIN(3, 0);
3020 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
3021 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u32Imm, 1);
3022 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3023
3024 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3025 IEM_MC_REF_EFLAGS(pEFlags);
3026 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
3027 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
3028
3029 IEM_MC_ADVANCE_RIP();
3030 IEM_MC_END();
3031 }
3032 else
3033 {
3034 /* memory target */
3035 uint32_t fAccess;
3036 if (pImpl->pfnLockedU32)
3037 fAccess = IEM_ACCESS_DATA_RW;
3038 else /* CMP, TEST */
3039 fAccess = IEM_ACCESS_DATA_R;
3040 IEM_MC_BEGIN(3, 2);
3041 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
3042 IEM_MC_ARG(uint32_t, u32Src, 1);
3043 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
3044 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3045
3046 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
3047 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
3048 IEM_MC_ASSIGN(u32Src, u32Imm);
3049 if (pImpl->pfnLockedU32)
3050 IEMOP_HLP_DONE_DECODING();
3051 else
3052 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3053 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
3054 IEM_MC_FETCH_EFLAGS(EFlags);
3055 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
3056 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
3057 else
3058 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
3059
3060 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
3061 IEM_MC_COMMIT_EFLAGS(EFlags);
3062 IEM_MC_ADVANCE_RIP();
3063 IEM_MC_END();
3064 }
3065 break;
3066 }
3067
3068 case IEMMODE_64BIT:
3069 {
3070 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3071 {
3072 /* register target */
3073 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
3074 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3075 IEM_MC_BEGIN(3, 0);
3076 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
3077 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u64Imm, 1);
3078 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3079
3080 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3081 IEM_MC_REF_EFLAGS(pEFlags);
3082 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
3083
3084 IEM_MC_ADVANCE_RIP();
3085 IEM_MC_END();
3086 }
3087 else
3088 {
3089 /* memory target */
3090 uint32_t fAccess;
3091 if (pImpl->pfnLockedU64)
3092 fAccess = IEM_ACCESS_DATA_RW;
3093 else /* CMP */
3094 fAccess = IEM_ACCESS_DATA_R;
3095 IEM_MC_BEGIN(3, 2);
3096 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
3097 IEM_MC_ARG(uint64_t, u64Src, 1);
3098 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
3099 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3100
3101 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
3102 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
3103 if (pImpl->pfnLockedU64)
3104 IEMOP_HLP_DONE_DECODING();
3105 else
3106 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3107 IEM_MC_ASSIGN(u64Src, u64Imm);
3108 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
3109 IEM_MC_FETCH_EFLAGS(EFlags);
3110 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
3111 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
3112 else
3113 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
3114
3115 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
3116 IEM_MC_COMMIT_EFLAGS(EFlags);
3117 IEM_MC_ADVANCE_RIP();
3118 IEM_MC_END();
3119 }
3120 break;
3121 }
3122 }
3123 return VINF_SUCCESS;
3124}
3125
3126
/**
 * @opcode 0x82
 * @opmnemonic grp1_82
 * @opgroup op_groups
 * Alias of opcode 0x80 (group 1 Eb,Ib); invalid outside 16/32-bit modes,
 * hence the 64-bit guard before delegating.
 */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_82)
{
    IEMOP_HLP_NO_64BIT(); /** @todo do we need to decode the whole instruction or is this ok? */
    return FNIEMOP_CALL(iemOp_Grp1_Eb_Ib_80);
}
3137
3138
/**
 * @opcode 0x83
 * Immediate group 1, sign-extended byte form: ADD/OR/ADC/SBB/AND/SUB/XOR/CMP
 * Ev,Ib.  The 8-bit immediate is sign-extended to the effective operand size
 * before the operation.
 */
FNIEMOP_DEF(iemOp_Grp1_Ev_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: IEMOP_MNEMONIC(add_Ev_Ib, "add Ev,Ib"); break;
        case 1: IEMOP_MNEMONIC(or_Ev_Ib, "or Ev,Ib"); break;
        case 2: IEMOP_MNEMONIC(adc_Ev_Ib, "adc Ev,Ib"); break;
        case 3: IEMOP_MNEMONIC(sbb_Ev_Ib, "sbb Ev,Ib"); break;
        case 4: IEMOP_MNEMONIC(and_Ev_Ib, "and Ev,Ib"); break;
        case 5: IEMOP_MNEMONIC(sub_Ev_Ib, "sub Ev,Ib"); break;
        case 6: IEMOP_MNEMONIC(xor_Ev_Ib, "xor Ev,Ib"); break;
        case 7: IEMOP_MNEMONIC(cmp_Ev_Ib, "cmp Ev,Ib"); break;
    }
    /* Note! Seems the OR, AND, and XOR instructions are present on CPUs prior
       to the 386 even if absent in the intel reference manuals and some
       3rd party opcode listings. */
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register target
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                /* (int8_t) cast: sign-extend the immediate to the operand size. */
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (int8_t)u8Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (int8_t)u8Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /* 32-bit writes zero bits 63:32 */

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int8_t)u8Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }
        }
    }
    else
    {
        /*
         * Memory target.
         */
        /* Only CMP has no locked worker; checking the U16 slot suffices since
           lockability is the same for all operand sizes of a given operation. */
        uint32_t fAccess;
        if (pImpl->pfnLockedU16)
            fAccess = IEM_ACCESS_DATA_RW;
        else /* CMP */
            fAccess = IEM_ACCESS_DATA_R;

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* 1 = one remaining opcode byte (the immediate). */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm);
                /* LOCK is only valid together with a write-back operation. */
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm);
                if (pImpl->pfnLockedU32)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm);
                if (pImpl->pfnLockedU64)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }
        }
    }
    return VINF_SUCCESS;
}
3323
3324
/**
 * @opcode 0x84
 * TEST Eb,Gb - AND without write-back, flags only; AF is left undefined.
 * Delegates to the generic rm,r8 binary-operator helper.
 */
FNIEMOP_DEF(iemOp_test_Eb_Gb)
{
    IEMOP_MNEMONIC(test_Eb_Gb, "test Eb,Gb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_test);
}
3334
3335
/**
 * @opcode 0x85
 *
 * TEST Ev,Gv - ANDs r/m16/32/64 with the general register, updating only
 * EFLAGS.  Delegates to the common word/dword/qword binary operator helper
 * with the 'test' arithmetic implementation table.
 */
FNIEMOP_DEF(iemOp_test_Ev_Gv)
{
    IEMOP_MNEMONIC(test_Ev_Gv, "test Ev,Gv");
    /* AF is architecturally undefined after TEST. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_test);
}
3345
3346
/**
 * @opcode 0x86
 *
 * XCHG Eb,Gb - exchanges a byte register with another byte register or a
 * memory byte.  The memory form is performed as a locked read-modify-write
 * via the mapped-memory helper (XCHG is implicitly locked on real hardware).
 */
FNIEMOP_DEF(iemOp_xchg_Eb_Gb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC(xchg_Eb_Gb, "xchg Eb,Gb");

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        /* Register form: fetch both bytes, then store them crosswise. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, uTmp1);
        IEM_MC_LOCAL(uint8_t, uTmp2);

        IEM_MC_FETCH_GREG_U8(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_FETCH_GREG_U8(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.  The register operand is taken by
         * reference and swapped with the mapped memory byte in one go.
         */
/** @todo the register must be committed separately! */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint8_t *, pu8Mem, 0);
        IEM_MC_ARG(uint8_t *, pu8Reg, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8, pu8Mem, pu8Reg);
        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Mem, IEM_ACCESS_DATA_RW);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3396
3397
/**
 * @opcode 0x87
 *
 * XCHG Ev,Gv - exchanges a word/dword/qword general register with another
 * register or a memory operand, switched on the effective operand size.
 * The memory form goes through the mapped-memory read-modify-write path.
 */
FNIEMOP_DEF(iemOp_xchg_Ev_Gv)
{
    IEMOP_MNEMONIC(xchg_Ev_Gv, "xchg Ev,Gv");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                /* Fetch both registers, then store them crosswise. */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, uTmp1);
                IEM_MC_LOCAL(uint16_t, uTmp2);

                IEM_MC_FETCH_GREG_U16(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, uTmp1);
                IEM_MC_LOCAL(uint32_t, uTmp2);

                IEM_MC_FETCH_GREG_U32(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U32(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, uTmp1);
                IEM_MC_LOCAL(uint64_t, uTmp2);

                IEM_MC_FETCH_GREG_U64(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U64(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory.  The register operand is referenced and
         * swapped with the mapped memory operand in one assembly helper call.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
/** @todo the register must be committed separately! */
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint16_t *, pu16Mem, 0);
                IEM_MC_ARG(uint16_t *, pu16Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu16Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16, pu16Mem, pu16Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Mem, IEM_ACCESS_DATA_RW);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint32_t *, pu32Mem, 0);
                IEM_MC_ARG(uint32_t *, pu32Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu32Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32, pu32Mem, pu32Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Mem, IEM_ACCESS_DATA_RW);

                /* Writing via the reference bypasses the normal 32-bit store
                   path, so the upper half must be zeroed explicitly here. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint64_t *, pu64Mem, 0);
                IEM_MC_ARG(uint64_t *, pu64Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu64Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64, pu64Mem, pu64Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Mem, IEM_ACCESS_DATA_RW);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
3521
3522
/**
 * @opcode 0x88
 *
 * MOV Eb,Gb - stores a byte register into another byte register or into
 * memory.
 */
FNIEMOP_DEF(iemOp_mov_Eb_Gb)
{
    IEMOP_MNEMONIC(mov_Eb_Gb, "mov Eb,Gb");

    uint8_t bRm;
    IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're writing a register to memory.  The effective address is
         * calculated (consuming any remaining operand bytes) before decoding
         * is declared done.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;

}
3564
3565
/**
 * @opcode 0x89
 *
 * MOV Ev,Gv - stores a word/dword/qword general register into another
 * register or into memory, switched on the effective operand size.
 */
FNIEMOP_DEF(iemOp_mov_Ev_Gv)
{
    IEMOP_MNEMONIC(mov_Ev_Gv, "mov Ev,Gv");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
3657
3658
/**
 * @opcode 0x8a
 *
 * MOV Gb,Eb - loads a byte register from another byte register or from
 * memory.
 */
FNIEMOP_DEF(iemOp_mov_Gb_Eb)
{
    IEMOP_MNEMONIC(mov_Gb_Eb, "mov Gb,Eb");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3698
3699
/**
 * @opcode 0x8b
 *
 * MOV Gv,Ev - loads a word/dword/qword general register from another
 * register or from memory, switched on the effective operand size.
 * (Also reached via opcode 0x63 in 64-bit mode with a 16/32-bit operand
 * size, see iemOp_arpl_Ew_Gw_movsx_Gv_Ev.)
 */
FNIEMOP_DEF(iemOp_mov_Gv_Ev)
{
    IEMOP_MNEMONIC(mov_Gv_Ev, "mov Gv,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
3791
3792
3793/**
3794 * opcode 0x63
3795 * @todo Table fixme
3796 */
3797FNIEMOP_DEF(iemOp_arpl_Ew_Gw_movsx_Gv_Ev)
3798{
3799 if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
3800 return FNIEMOP_CALL(iemOp_arpl_Ew_Gw);
3801 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
3802 return FNIEMOP_CALL(iemOp_mov_Gv_Ev);
3803 return FNIEMOP_CALL(iemOp_movsxd_Gv_Ev);
3804}
3805
3806
/**
 * @opcode 0x8c
 *
 * MOV Ev,Sw - stores a segment register into a general register (respecting
 * the operand size, zero extending) or into memory (always a 16-bit store).
 */
FNIEMOP_DEF(iemOp_mov_Ev_Sw)
{
    IEMOP_MNEMONIC(mov_Ev_Sw, "mov Ev,Sw");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * Check that the destination register exists. The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
    if (   iSegReg > X86_SREG_GS)
        return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     * In that case, the operand size is respected and the upper bits are
     * cleared (starting with some pentium).
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_SREG_ZX_U32(u32Value, iSegReg); /* zero extended */
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_SREG_ZX_U64(u64Value, iSegReg); /* zero extended */
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're saving the register to memory.  The access is word sized
         * regardless of operand size prefixes.
         */
#if 0 /* not necessary */
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
#endif
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3882
3883
3884
3885
/**
 * @opcode 0x8d
 *
 * LEA Gv,M - stores the effective address of the memory operand into a
 * general register.  The register form is invalid (\#UD).  The calculated
 * address is truncated to the effective operand size for 16/32-bit.
 */
FNIEMOP_DEF(iemOp_lea_Gv_M)
{
    IEMOP_MNEMONIC(lea_Gv_M, "lea Gv,M");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE(); /* no register form */

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR,  GCPtrEffSrc);
            IEM_MC_LOCAL(uint16_t, u16Cast);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ASSIGN_TO_SMALLER(u16Cast, GCPtrEffSrc); /* truncate to 16 bits */
            IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Cast);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR,  GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Cast);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ASSIGN_TO_SMALLER(u32Cast, GCPtrEffSrc); /* truncate to 32 bits */
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Cast);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, GCPtrEffSrc);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
    }
    AssertFailedReturn(VERR_IEM_IPE_7);
}
3934
3935
/**
 * @opcode 0x8e
 *
 * MOV Sw,Ev - loads a segment register from a general register or memory.
 * The access is always 16 bits; loading CS is invalid (\#UD).  The actual
 * descriptor loading and checks are done by the iemCImpl_load_SReg C
 * implementation.
 */
FNIEMOP_DEF(iemOp_mov_Sw_Ev)
{
    IEMOP_MNEMONIC(mov_Sw_Ev, "mov Sw,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * The practical operand size is 16-bit.
     */
#if 0 /* not necessary */
    pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
#endif

    /*
     * Check that the destination register exists and can be used with this
     * instruction.  The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
    if (   iSegReg == X86_SREG_CS
        || iSegReg > X86_SREG_GS)
        return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
        IEM_MC_ARG(uint16_t,      u16Value,          1);
        IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading the register from memory.  The access is word sized
         * regardless of operand size prefixes.
         */
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
        IEM_MC_ARG(uint16_t,      u16Value,          1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3992
3993
/** Opcode 0x8f /0.
 *
 * POP Ev - pops the top of stack into a register or memory operand.  The
 * tricky part is that RSP is incremented *before* the effective address of a
 * memory destination is calculated, which doesn't fit the normal one-pass
 * MC decoding, so the memory form is an interpreter-only implementation that
 * calculates the effective address with a pre-adjusted (R|E)SP. */
FNIEMOP_DEF_1(iemOp_pop_Ev, uint8_t, bRm)
{
    /* This bugger is rather annoying as it requires rSP to be updated before
       doing the effective address calculations.  Will eventually require a
       split between the R/M+SIB decoding and the effective address
       calculation - which is something that is required for any attempt at
       reusing this code for a recompiler.  It may also be good to have if we
       need to delay #UD exception caused by invalid lock prefixes.

       For now, we'll do a mostly safe interpreter-only implementation here. */
    /** @todo What's the deal with the 'reg' field and pop Ev?  Ignorning it for
     *        now until tests show it's checked.. */
    IEMOP_MNEMONIC(pop_Ev, "pop Ev");

    /* Register access is relatively easy and can share code. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(iemOpCommonPopGReg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);

    /*
     * Memory target.
     *
     * Intel says that RSP is incremented before it's used in any effective
     * address calcuations.  This means some serious extra annoyance here since
     * we decode and calculate the effective address in one step and like to
     * delay committing registers till everything is done.
     *
     * So, we'll decode and calculate the effective address twice.  This will
     * require some recoding if turned into a recompiler.
     */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* The common code does this differently. */

#ifndef TST_IEM_CHECK_MC
    /* Calc effective address with modified ESP.  The offset passed to the
       helper (2/4/8) is the operand size, i.e. how much RSP is biased by. */
/** @todo testcase */
    PCPUMCTX        pCtx = IEM_GET_CTX(pVCpu);
    RTGCPTR         GCPtrEff;
    VBOXSTRICTRC    rcStrict;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 2); break;
        case IEMMODE_32BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 4); break;
        case IEMMODE_64BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 8); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict != VINF_SUCCESS)
        return rcStrict;
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* Perform the operation - this should be CImpl.  Pop into a temporary
       RSP copy so a faulting store leaves the real RSP untouched. */
    RTUINT64U TmpRsp;
    TmpRsp.u = pCtx->rsp;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Value;
            rcStrict = iemMemStackPopU16Ex(pVCpu, &u16Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU16(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u16Value);
            break;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Value;
            rcStrict = iemMemStackPopU32Ex(pVCpu, &u32Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU32(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u32Value);
            break;
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Value;
            rcStrict = iemMemStackPopU64Ex(pVCpu, &u64Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU64(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u64Value);
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict == VINF_SUCCESS)
    {
        /* Commit the new RSP and advance RIP only when everything succeeded. */
        pCtx->rsp = TmpRsp.u;
        iemRegUpdateRipAndClearRF(pVCpu);
    }
    return rcStrict;

#else
    return VERR_IEM_IPE_2;
#endif
}
4088
4089
/**
 * @opcode 0x8f
 *
 * Group 1A: /0 is POP Ev; on AMD CPUs with XOP, /1 thru /7 form the first
 * byte of the three-byte XOP prefix.  Decodes the XOP prefix fields into
 * the per-VCPU decoder state and dispatches on the XOP opcode map.
 */
FNIEMOP_DEF(iemOp_Grp1A__xop)
{
    /*
     * AMD has defined /1 thru /7 as XOP prefix.  The prefix is similar to the
     * three byte VEX prefix, except that the mmmmm field cannot have the values
     * 0 thru 7, because it would then be confused with pop Ev (modrm.reg == 0).
     */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) == (0 << X86_MODRM_REG_SHIFT)) /* /0 */
        return FNIEMOP_CALL_1(iemOp_pop_Ev, bRm);

    IEMOP_MNEMONIC(xop, "xop");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXop)
    {
        /** @todo Test when exctly the XOP conformance checks kick in during
         * instruction decoding and fetching (using \#PF). */
        uint8_t bXop2;   IEM_OPCODE_GET_NEXT_U8(&bXop2);
        uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
        /* XOP may not be combined with 66/F2/F3/LOCK/REX prefixes. */
        if (   (  pVCpu->iem.s.fPrefixes
                & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_LOCK | IEM_OP_PRF_REX))
            == 0)
        {
            pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_XOP;
            if (bXop2 & 0x80 /* XOP.W */)
                pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
            /* R, X and B are stored inverted in the prefix bytes. */
            pVCpu->iem.s.uRexReg    = ~bRm >> (7 - 3);
            pVCpu->iem.s.uRexIndex  = ~bRm >> (6 - 3);
            pVCpu->iem.s.uRexB      = ~bRm >> (5 - 3);
            pVCpu->iem.s.uVex3rdReg = (~bXop2 >> 3) & 0xf;
            pVCpu->iem.s.uVexLength = (bXop2 >> 2) & 1;
            pVCpu->iem.s.idxPrefix  = bXop2 & 0x3;

            /** @todo XOP: Just use new tables and decoders. */
            switch (bRm & 0x1f)
            {
                case 8: /* xop opcode map 8. */
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;

                case 9: /* xop opcode map 9. */
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;

                case 10: /* xop opcode map 10. */
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;

                default:
                    Log(("XOP: Invalid vvvv value: %#x!\n", bRm & 0x1f));
                    return IEMOP_RAISE_INVALID_OPCODE();
            }
        }
        else
            Log(("XOP: Invalid prefix mix!\n"));
    }
    else
        Log(("XOP: XOP support disabled!\n"));
    return IEMOP_RAISE_INVALID_OPCODE();
}
4152
4153
/**
 * Common 'xchg reg,rAX' helper.
 *
 * Used by opcodes 0x90 thru 0x97 (and REX.B variants); exchanges the given
 * general register (after REX.B extension) with rAX at the effective
 * operand size.
 *
 * @param   iReg    The register index encoded in the opcode (0-7), extended
 *                  here with REX.B.
 */
FNIEMOP_DEF_1(iemOpCommonXchgGRegRax, uint8_t, iReg)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    iReg |= pVCpu->iem.s.uRexB;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint16_t, u16Tmp1);
            IEM_MC_LOCAL(uint16_t, u16Tmp2);
            IEM_MC_FETCH_GREG_U16(u16Tmp1, iReg);
            IEM_MC_FETCH_GREG_U16(u16Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp1);
            IEM_MC_STORE_GREG_U16(iReg, u16Tmp2);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Tmp1);
            IEM_MC_LOCAL(uint32_t, u32Tmp2);
            IEM_MC_FETCH_GREG_U32(u32Tmp1, iReg);
            IEM_MC_FETCH_GREG_U32(u32Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp1);
            IEM_MC_STORE_GREG_U32(iReg, u32Tmp2);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Tmp1);
            IEM_MC_LOCAL(uint64_t, u64Tmp2);
            IEM_MC_FETCH_GREG_U64(u64Tmp1, iReg);
            IEM_MC_FETCH_GREG_U64(u64Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp1);
            IEM_MC_STORE_GREG_U64(iReg, u64Tmp2);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
4203
4204
4205/**
4206 * @opcode 0x90
4207 */
4208FNIEMOP_DEF(iemOp_nop)
4209{
4210 /* R8/R8D and RAX/EAX can be exchanged. */
4211 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_B)
4212 {
4213 IEMOP_MNEMONIC(xchg_r8_rAX, "xchg r8,rAX");
4214 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xAX);
4215 }
4216
4217 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
4218 IEMOP_MNEMONIC(pause, "pause");
4219 else
4220 IEMOP_MNEMONIC(nop, "nop");
4221 IEM_MC_BEGIN(0, 0);
4222 IEM_MC_ADVANCE_RIP();
4223 IEM_MC_END();
4224 return VINF_SUCCESS;
4225}
4226
4227
/**
 * @opcode 0x91
 *
 * XCHG rCX,rAX (rR9 with REX.B) - delegates to the common helper.
 */
FNIEMOP_DEF(iemOp_xchg_eCX_eAX)
{
    IEMOP_MNEMONIC(xchg_rCX_rAX, "xchg rCX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xCX);
}
4236
4237
/**
 * @opcode 0x92
 *
 * XCHG rDX,rAX (r10 with REX.B) - delegates to the common helper.
 */
FNIEMOP_DEF(iemOp_xchg_eDX_eAX)
{
    IEMOP_MNEMONIC(xchg_rDX_rAX, "xchg rDX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDX);
}
4246
4247
/**
 * @opcode 0x93
 *
 * XCHG rBX,rAX (r11 with REX.B) - delegates to the common helper.
 */
FNIEMOP_DEF(iemOp_xchg_eBX_eAX)
{
    IEMOP_MNEMONIC(xchg_rBX_rAX, "xchg rBX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBX);
}
4256
4257
4258/**
4259 * @opcode 0x94
4260 */
4261FNIEMOP_DEF(iemOp_xchg_eSP_eAX)
4262{
4263 IEMOP_MNEMONIC(xchg_rSX_rAX, "xchg rSX,rAX");
4264 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSP);
4265}
4266
4267
/**
 * @opcode 0x95
 *
 * XCHG rBP,rAX (r13 with REX.B) - delegates to the common helper.
 */
FNIEMOP_DEF(iemOp_xchg_eBP_eAX)
{
    IEMOP_MNEMONIC(xchg_rBP_rAX, "xchg rBP,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBP);
}
4276
4277
/**
 * @opcode 0x96
 *
 * XCHG rSI,rAX (r14 with REX.B) - delegates to the common helper.
 */
FNIEMOP_DEF(iemOp_xchg_eSI_eAX)
{
    IEMOP_MNEMONIC(xchg_rSI_rAX, "xchg rSI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSI);
}
4286
4287
/**
 * @opcode 0x97
 *
 * XCHG rDI,rAX (r15 with REX.B) - delegates to the common helper.
 */
FNIEMOP_DEF(iemOp_xchg_eDI_eAX)
{
    IEMOP_MNEMONIC(xchg_rDI_rAX, "xchg rDI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDI);
}
4296
4297
/**
 * @opcode 0x98
 *
 * CBW / CWDE / CDQE - sign extends AL into AX, AX into EAX, or EAX into RAX
 * depending on the effective operand size.  Implemented by testing the sign
 * bit of the source half and OR-ing/AND-ing the upper bits accordingly.
 */
FNIEMOP_DEF(iemOp_cbw)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEMOP_MNEMONIC(cbw, "cbw");
            IEM_MC_BEGIN(0, 1);
            /* AH = (AL & 0x80) ? 0xff : 0x00 */
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 7) {
                IEM_MC_OR_GREG_U16(X86_GREG_xAX, UINT16_C(0xff00));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U16(X86_GREG_xAX, UINT16_C(0x00ff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEMOP_MNEMONIC(cwde, "cwde");
            IEM_MC_BEGIN(0, 1);
            /* EAX = sign extended AX */
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
                IEM_MC_OR_GREG_U32(X86_GREG_xAX, UINT32_C(0xffff0000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U32(X86_GREG_xAX, UINT32_C(0x0000ffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEMOP_MNEMONIC(cdqe, "cdqe");
            IEM_MC_BEGIN(0, 1);
            /* RAX = sign extended EAX */
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
                IEM_MC_OR_GREG_U64(X86_GREG_xAX, UINT64_C(0xffffffff00000000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U64(X86_GREG_xAX, UINT64_C(0x00000000ffffffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
4345
4346
4347/**
4348 * @opcode 0x99
4349 */
4350FNIEMOP_DEF(iemOp_cwd)
4351{
4352 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4353 switch (pVCpu->iem.s.enmEffOpSize)
4354 {
4355 case IEMMODE_16BIT:
4356 IEMOP_MNEMONIC(cwd, "cwd");
4357 IEM_MC_BEGIN(0, 1);
4358 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
4359 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, UINT16_C(0xffff));
4360 } IEM_MC_ELSE() {
4361 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, 0);
4362 } IEM_MC_ENDIF();
4363 IEM_MC_ADVANCE_RIP();
4364 IEM_MC_END();
4365 return VINF_SUCCESS;
4366
4367 case IEMMODE_32BIT:
4368 IEMOP_MNEMONIC(cdq, "cdq");
4369 IEM_MC_BEGIN(0, 1);
4370 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
4371 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, UINT32_C(0xffffffff));
4372 } IEM_MC_ELSE() {
4373 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, 0);
4374 } IEM_MC_ENDIF();
4375 IEM_MC_ADVANCE_RIP();
4376 IEM_MC_END();
4377 return VINF_SUCCESS;
4378
4379 case IEMMODE_64BIT:
4380 IEMOP_MNEMONIC(cqo, "cqo");
4381 IEM_MC_BEGIN(0, 1);
4382 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 63) {
4383 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, UINT64_C(0xffffffffffffffff));
4384 } IEM_MC_ELSE() {
4385 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, 0);
4386 } IEM_MC_ENDIF();
4387 IEM_MC_ADVANCE_RIP();
4388 IEM_MC_END();
4389 return VINF_SUCCESS;
4390
4391 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4392 }
4393}
4394
4395
4396/**
4397 * @opcode 0x9a
4398 */
4399FNIEMOP_DEF(iemOp_call_Ap)
4400{
4401 IEMOP_MNEMONIC(call_Ap, "call Ap");
4402 IEMOP_HLP_NO_64BIT();
4403
4404 /* Decode the far pointer address and pass it on to the far call C implementation. */
4405 uint32_t offSeg;
4406 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
4407 IEM_OPCODE_GET_NEXT_U32(&offSeg);
4408 else
4409 IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
4410 uint16_t uSel; IEM_OPCODE_GET_NEXT_U16(&uSel);
4411 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4412 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_callf, uSel, offSeg, pVCpu->iem.s.enmEffOpSize);
4413}
4414
4415
/** Opcode 0x9b. (aka fwait)
 *
 * Checks for pending (unmasked) x87 exceptions and raises the relevant
 * \#NM/\#MF conditions if needed; otherwise a no-op that just advances RIP.
 */
FNIEMOP_DEF(iemOp_wait)
{
    IEMOP_MNEMONIC(wait, "wait");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_WAIT_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
4429
4430
4431/**
4432 * @opcode 0x9c
4433 */
4434FNIEMOP_DEF(iemOp_pushf_Fv)
4435{
4436 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4437 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4438 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_pushf, pVCpu->iem.s.enmEffOpSize);
4439}
4440
4441
4442/**
4443 * @opcode 0x9d
4444 */
4445FNIEMOP_DEF(iemOp_popf_Fv)
4446{
4447 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4448 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4449 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_popf, pVCpu->iem.s.enmEffOpSize);
4450}
4451
4452
4453/**
4454 * @opcode 0x9e
4455 */
4456FNIEMOP_DEF(iemOp_sahf)
4457{
4458 IEMOP_MNEMONIC(sahf, "sahf");
4459 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4460 if ( pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
4461 && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
4462 return IEMOP_RAISE_INVALID_OPCODE();
4463 IEM_MC_BEGIN(0, 2);
4464 IEM_MC_LOCAL(uint32_t, u32Flags);
4465 IEM_MC_LOCAL(uint32_t, EFlags);
4466 IEM_MC_FETCH_EFLAGS(EFlags);
4467 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Flags, X86_GREG_xSP/*=AH*/);
4468 IEM_MC_AND_LOCAL_U32(u32Flags, X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
4469 IEM_MC_AND_LOCAL_U32(EFlags, UINT32_C(0xffffff00));
4470 IEM_MC_OR_LOCAL_U32(u32Flags, X86_EFL_1);
4471 IEM_MC_OR_2LOCS_U32(EFlags, u32Flags);
4472 IEM_MC_COMMIT_EFLAGS(EFlags);
4473 IEM_MC_ADVANCE_RIP();
4474 IEM_MC_END();
4475 return VINF_SUCCESS;
4476}
4477
4478
4479/**
4480 * @opcode 0x9f
4481 */
4482FNIEMOP_DEF(iemOp_lahf)
4483{
4484 IEMOP_MNEMONIC(lahf, "lahf");
4485 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4486 if ( pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
4487 && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
4488 return IEMOP_RAISE_INVALID_OPCODE();
4489 IEM_MC_BEGIN(0, 1);
4490 IEM_MC_LOCAL(uint8_t, u8Flags);
4491 IEM_MC_FETCH_EFLAGS_U8(u8Flags);
4492 IEM_MC_STORE_GREG_U8(X86_GREG_xSP/*=AH*/, u8Flags);
4493 IEM_MC_ADVANCE_RIP();
4494 IEM_MC_END();
4495 return VINF_SUCCESS;
4496}
4497
4498
4499/**
4500 * Macro used by iemOp_mov_AL_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL and
4501 * iemOp_mov_Ov_rAX to fetch the moffsXX bit of the opcode and fend of lock
4502 * prefixes. Will return on failures.
4503 * @param a_GCPtrMemOff The variable to store the offset in.
4504 */
4505#define IEMOP_FETCH_MOFFS_XX(a_GCPtrMemOff) \
4506 do \
4507 { \
4508 switch (pVCpu->iem.s.enmEffAddrMode) \
4509 { \
4510 case IEMMODE_16BIT: \
4511 IEM_OPCODE_GET_NEXT_U16_ZX_U64(&(a_GCPtrMemOff)); \
4512 break; \
4513 case IEMMODE_32BIT: \
4514 IEM_OPCODE_GET_NEXT_U32_ZX_U64(&(a_GCPtrMemOff)); \
4515 break; \
4516 case IEMMODE_64BIT: \
4517 IEM_OPCODE_GET_NEXT_U64(&(a_GCPtrMemOff)); \
4518 break; \
4519 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4520 } \
4521 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4522 } while (0)
4523
4524/**
4525 * @opcode 0xa0
4526 */
4527FNIEMOP_DEF(iemOp_mov_AL_Ob)
4528{
4529 /*
4530 * Get the offset and fend of lock prefixes.
4531 */
4532 RTGCPTR GCPtrMemOff;
4533 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
4534
4535 /*
4536 * Fetch AL.
4537 */
4538 IEM_MC_BEGIN(0,1);
4539 IEM_MC_LOCAL(uint8_t, u8Tmp);
4540 IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
4541 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
4542 IEM_MC_ADVANCE_RIP();
4543 IEM_MC_END();
4544 return VINF_SUCCESS;
4545}
4546
4547
4548/**
4549 * @opcode 0xa1
4550 */
4551FNIEMOP_DEF(iemOp_mov_rAX_Ov)
4552{
4553 /*
4554 * Get the offset and fend of lock prefixes.
4555 */
4556 IEMOP_MNEMONIC(mov_rAX_Ov, "mov rAX,Ov");
4557 RTGCPTR GCPtrMemOff;
4558 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
4559
4560 /*
4561 * Fetch rAX.
4562 */
4563 switch (pVCpu->iem.s.enmEffOpSize)
4564 {
4565 case IEMMODE_16BIT:
4566 IEM_MC_BEGIN(0,1);
4567 IEM_MC_LOCAL(uint16_t, u16Tmp);
4568 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
4569 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
4570 IEM_MC_ADVANCE_RIP();
4571 IEM_MC_END();
4572 return VINF_SUCCESS;
4573
4574 case IEMMODE_32BIT:
4575 IEM_MC_BEGIN(0,1);
4576 IEM_MC_LOCAL(uint32_t, u32Tmp);
4577 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
4578 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp);
4579 IEM_MC_ADVANCE_RIP();
4580 IEM_MC_END();
4581 return VINF_SUCCESS;
4582
4583 case IEMMODE_64BIT:
4584 IEM_MC_BEGIN(0,1);
4585 IEM_MC_LOCAL(uint64_t, u64Tmp);
4586 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
4587 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp);
4588 IEM_MC_ADVANCE_RIP();
4589 IEM_MC_END();
4590 return VINF_SUCCESS;
4591
4592 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4593 }
4594}
4595
4596
4597/**
4598 * @opcode 0xa2
4599 */
4600FNIEMOP_DEF(iemOp_mov_Ob_AL)
4601{
4602 /*
4603 * Get the offset and fend of lock prefixes.
4604 */
4605 RTGCPTR GCPtrMemOff;
4606 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
4607
4608 /*
4609 * Store AL.
4610 */
4611 IEM_MC_BEGIN(0,1);
4612 IEM_MC_LOCAL(uint8_t, u8Tmp);
4613 IEM_MC_FETCH_GREG_U8(u8Tmp, X86_GREG_xAX);
4614 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u8Tmp);
4615 IEM_MC_ADVANCE_RIP();
4616 IEM_MC_END();
4617 return VINF_SUCCESS;
4618}
4619
4620
4621/**
4622 * @opcode 0xa3
4623 */
4624FNIEMOP_DEF(iemOp_mov_Ov_rAX)
4625{
4626 /*
4627 * Get the offset and fend of lock prefixes.
4628 */
4629 RTGCPTR GCPtrMemOff;
4630 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
4631
4632 /*
4633 * Store rAX.
4634 */
4635 switch (pVCpu->iem.s.enmEffOpSize)
4636 {
4637 case IEMMODE_16BIT:
4638 IEM_MC_BEGIN(0,1);
4639 IEM_MC_LOCAL(uint16_t, u16Tmp);
4640 IEM_MC_FETCH_GREG_U16(u16Tmp, X86_GREG_xAX);
4641 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u16Tmp);
4642 IEM_MC_ADVANCE_RIP();
4643 IEM_MC_END();
4644 return VINF_SUCCESS;
4645
4646 case IEMMODE_32BIT:
4647 IEM_MC_BEGIN(0,1);
4648 IEM_MC_LOCAL(uint32_t, u32Tmp);
4649 IEM_MC_FETCH_GREG_U32(u32Tmp, X86_GREG_xAX);
4650 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u32Tmp);
4651 IEM_MC_ADVANCE_RIP();
4652 IEM_MC_END();
4653 return VINF_SUCCESS;
4654
4655 case IEMMODE_64BIT:
4656 IEM_MC_BEGIN(0,1);
4657 IEM_MC_LOCAL(uint64_t, u64Tmp);
4658 IEM_MC_FETCH_GREG_U64(u64Tmp, X86_GREG_xAX);
4659 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u64Tmp);
4660 IEM_MC_ADVANCE_RIP();
4661 IEM_MC_END();
4662 return VINF_SUCCESS;
4663
4664 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4665 }
4666}
4667
/** Macro used by iemOp_movsb_Xb_Yb and iemOp_movswd_Xv_Yv.
 *
 * Emits one non-repeated MOVS step: reads ValBits bits from
 * effSeg:xSI, writes them to ES:xDI, then advances (or, when EFLAGS.DF is
 * set, retreats) both index registers by the element size in bytes.
 * AddrBits selects how much of xSI/xDI participates in addressing. */
#define IEM_MOVS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(0, 2); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR,           uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP(); \
    IEM_MC_END();
4686
4687/**
4688 * @opcode 0xa4
4689 */
4690FNIEMOP_DEF(iemOp_movsb_Xb_Yb)
4691{
4692 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4693
4694 /*
4695 * Use the C implementation if a repeat prefix is encountered.
4696 */
4697 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
4698 {
4699 IEMOP_MNEMONIC(rep_movsb_Xb_Yb, "rep movsb Xb,Yb");
4700 switch (pVCpu->iem.s.enmEffAddrMode)
4701 {
4702 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr16, pVCpu->iem.s.iEffSeg);
4703 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr32, pVCpu->iem.s.iEffSeg);
4704 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr64, pVCpu->iem.s.iEffSeg);
4705 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4706 }
4707 }
4708 IEMOP_MNEMONIC(movsb_Xb_Yb, "movsb Xb,Yb");
4709
4710 /*
4711 * Sharing case implementation with movs[wdq] below.
4712 */
4713 switch (pVCpu->iem.s.enmEffAddrMode)
4714 {
4715 case IEMMODE_16BIT: IEM_MOVS_CASE(8, 16); break;
4716 case IEMMODE_32BIT: IEM_MOVS_CASE(8, 32); break;
4717 case IEMMODE_64BIT: IEM_MOVS_CASE(8, 64); break;
4718 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4719 }
4720 return VINF_SUCCESS;
4721}
4722
4723
4724/**
4725 * @opcode 0xa5
4726 */
4727FNIEMOP_DEF(iemOp_movswd_Xv_Yv)
4728{
4729 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4730
4731 /*
4732 * Use the C implementation if a repeat prefix is encountered.
4733 */
4734 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
4735 {
4736 IEMOP_MNEMONIC(rep_movs_Xv_Yv, "rep movs Xv,Yv");
4737 switch (pVCpu->iem.s.enmEffOpSize)
4738 {
4739 case IEMMODE_16BIT:
4740 switch (pVCpu->iem.s.enmEffAddrMode)
4741 {
4742 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr16, pVCpu->iem.s.iEffSeg);
4743 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr32, pVCpu->iem.s.iEffSeg);
4744 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr64, pVCpu->iem.s.iEffSeg);
4745 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4746 }
4747 break;
4748 case IEMMODE_32BIT:
4749 switch (pVCpu->iem.s.enmEffAddrMode)
4750 {
4751 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr16, pVCpu->iem.s.iEffSeg);
4752 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr32, pVCpu->iem.s.iEffSeg);
4753 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr64, pVCpu->iem.s.iEffSeg);
4754 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4755 }
4756 case IEMMODE_64BIT:
4757 switch (pVCpu->iem.s.enmEffAddrMode)
4758 {
4759 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6);
4760 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr32, pVCpu->iem.s.iEffSeg);
4761 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr64, pVCpu->iem.s.iEffSeg);
4762 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4763 }
4764 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4765 }
4766 }
4767 IEMOP_MNEMONIC(movs_Xv_Yv, "movs Xv,Yv");
4768
4769 /*
4770 * Annoying double switch here.
4771 * Using ugly macro for implementing the cases, sharing it with movsb.
4772 */
4773 switch (pVCpu->iem.s.enmEffOpSize)
4774 {
4775 case IEMMODE_16BIT:
4776 switch (pVCpu->iem.s.enmEffAddrMode)
4777 {
4778 case IEMMODE_16BIT: IEM_MOVS_CASE(16, 16); break;
4779 case IEMMODE_32BIT: IEM_MOVS_CASE(16, 32); break;
4780 case IEMMODE_64BIT: IEM_MOVS_CASE(16, 64); break;
4781 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4782 }
4783 break;
4784
4785 case IEMMODE_32BIT:
4786 switch (pVCpu->iem.s.enmEffAddrMode)
4787 {
4788 case IEMMODE_16BIT: IEM_MOVS_CASE(32, 16); break;
4789 case IEMMODE_32BIT: IEM_MOVS_CASE(32, 32); break;
4790 case IEMMODE_64BIT: IEM_MOVS_CASE(32, 64); break;
4791 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4792 }
4793 break;
4794
4795 case IEMMODE_64BIT:
4796 switch (pVCpu->iem.s.enmEffAddrMode)
4797 {
4798 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
4799 case IEMMODE_32BIT: IEM_MOVS_CASE(64, 32); break;
4800 case IEMMODE_64BIT: IEM_MOVS_CASE(64, 64); break;
4801 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4802 }
4803 break;
4804 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4805 }
4806 return VINF_SUCCESS;
4807}
4808
4809#undef IEM_MOVS_CASE
4810
/** Macro used by iemOp_cmpsb_Xb_Yb and iemOp_cmpswd_Xv_Yv.
 *
 * Emits one non-repeated CMPS step: compares ValBits bits at
 * effSeg:xSI against ES:xDI (via iemAImpl_cmp, which updates the arithmetic
 * flags), then advances/retreats both index registers per EFLAGS.DF. */
#define IEM_CMPS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(3, 3); \
    IEM_MC_ARG(uint##ValBits##_t *, puValue1, 0); \
    IEM_MC_ARG(uint##ValBits##_t,   uValue2,  1); \
    IEM_MC_ARG(uint32_t *,          pEFlags,  2); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue1); \
    IEM_MC_LOCAL(RTGCPTR,           uAddr); \
    \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue1, pVCpu->iem.s.iEffSeg, uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue2, X86_SREG_ES, uAddr); \
    IEM_MC_REF_LOCAL(puValue1, uValue1); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puValue1, uValue2, pEFlags); \
    \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP(); \
    IEM_MC_END(); \
4837
4838/**
4839 * @opcode 0xa6
4840 */
4841FNIEMOP_DEF(iemOp_cmpsb_Xb_Yb)
4842{
4843 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4844
4845 /*
4846 * Use the C implementation if a repeat prefix is encountered.
4847 */
4848 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
4849 {
4850 IEMOP_MNEMONIC(repz_cmps_Xb_Yb, "repz cmps Xb,Yb");
4851 switch (pVCpu->iem.s.enmEffAddrMode)
4852 {
4853 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
4854 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
4855 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
4856 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4857 }
4858 }
4859 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
4860 {
4861 IEMOP_MNEMONIC(repnz_cmps_Xb_Yb, "repnz cmps Xb,Yb");
4862 switch (pVCpu->iem.s.enmEffAddrMode)
4863 {
4864 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
4865 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
4866 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
4867 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4868 }
4869 }
4870 IEMOP_MNEMONIC(cmps_Xb_Yb, "cmps Xb,Yb");
4871
4872 /*
4873 * Sharing case implementation with cmps[wdq] below.
4874 */
4875 switch (pVCpu->iem.s.enmEffAddrMode)
4876 {
4877 case IEMMODE_16BIT: IEM_CMPS_CASE(8, 16); break;
4878 case IEMMODE_32BIT: IEM_CMPS_CASE(8, 32); break;
4879 case IEMMODE_64BIT: IEM_CMPS_CASE(8, 64); break;
4880 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4881 }
4882 return VINF_SUCCESS;
4883
4884}
4885
4886
4887/**
4888 * @opcode 0xa7
4889 */
4890FNIEMOP_DEF(iemOp_cmpswd_Xv_Yv)
4891{
4892 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4893
4894 /*
4895 * Use the C implementation if a repeat prefix is encountered.
4896 */
4897 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
4898 {
4899 IEMOP_MNEMONIC(repe_cmps_Xv_Yv, "repe cmps Xv,Yv");
4900 switch (pVCpu->iem.s.enmEffOpSize)
4901 {
4902 case IEMMODE_16BIT:
4903 switch (pVCpu->iem.s.enmEffAddrMode)
4904 {
4905 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
4906 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
4907 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
4908 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4909 }
4910 break;
4911 case IEMMODE_32BIT:
4912 switch (pVCpu->iem.s.enmEffAddrMode)
4913 {
4914 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
4915 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
4916 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
4917 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4918 }
4919 case IEMMODE_64BIT:
4920 switch (pVCpu->iem.s.enmEffAddrMode)
4921 {
4922 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_4);
4923 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
4924 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
4925 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4926 }
4927 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4928 }
4929 }
4930
4931 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
4932 {
4933 IEMOP_MNEMONIC(repne_cmps_Xv_Yv, "repne cmps Xv,Yv");
4934 switch (pVCpu->iem.s.enmEffOpSize)
4935 {
4936 case IEMMODE_16BIT:
4937 switch (pVCpu->iem.s.enmEffAddrMode)
4938 {
4939 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
4940 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
4941 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
4942 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4943 }
4944 break;
4945 case IEMMODE_32BIT:
4946 switch (pVCpu->iem.s.enmEffAddrMode)
4947 {
4948 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
4949 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
4950 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
4951 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4952 }
4953 case IEMMODE_64BIT:
4954 switch (pVCpu->iem.s.enmEffAddrMode)
4955 {
4956 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_2);
4957 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
4958 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
4959 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4960 }
4961 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4962 }
4963 }
4964
4965 IEMOP_MNEMONIC(cmps_Xv_Yv, "cmps Xv,Yv");
4966
4967 /*
4968 * Annoying double switch here.
4969 * Using ugly macro for implementing the cases, sharing it with cmpsb.
4970 */
4971 switch (pVCpu->iem.s.enmEffOpSize)
4972 {
4973 case IEMMODE_16BIT:
4974 switch (pVCpu->iem.s.enmEffAddrMode)
4975 {
4976 case IEMMODE_16BIT: IEM_CMPS_CASE(16, 16); break;
4977 case IEMMODE_32BIT: IEM_CMPS_CASE(16, 32); break;
4978 case IEMMODE_64BIT: IEM_CMPS_CASE(16, 64); break;
4979 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4980 }
4981 break;
4982
4983 case IEMMODE_32BIT:
4984 switch (pVCpu->iem.s.enmEffAddrMode)
4985 {
4986 case IEMMODE_16BIT: IEM_CMPS_CASE(32, 16); break;
4987 case IEMMODE_32BIT: IEM_CMPS_CASE(32, 32); break;
4988 case IEMMODE_64BIT: IEM_CMPS_CASE(32, 64); break;
4989 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4990 }
4991 break;
4992
4993 case IEMMODE_64BIT:
4994 switch (pVCpu->iem.s.enmEffAddrMode)
4995 {
4996 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
4997 case IEMMODE_32BIT: IEM_CMPS_CASE(64, 32); break;
4998 case IEMMODE_64BIT: IEM_CMPS_CASE(64, 64); break;
4999 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5000 }
5001 break;
5002 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5003 }
5004 return VINF_SUCCESS;
5005
5006}
5007
5008#undef IEM_CMPS_CASE
5009
5010/**
5011 * @opcode 0xa8
5012 */
5013FNIEMOP_DEF(iemOp_test_AL_Ib)
5014{
5015 IEMOP_MNEMONIC(test_al_Ib, "test al,Ib");
5016 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
5017 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_test);
5018}
5019
5020
5021/**
5022 * @opcode 0xa9
5023 */
5024FNIEMOP_DEF(iemOp_test_eAX_Iz)
5025{
5026 IEMOP_MNEMONIC(test_rAX_Iz, "test rAX,Iz");
5027 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
5028 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_test);
5029}
5030
5031
/** Macro used by iemOp_stosb_Yb_AL and iemOp_stoswd_Yv_eAX.
 *
 * Emits one non-repeated STOS step: stores the low ValBits bits of xAX to
 * ES:xDI, then advances/retreats xDI per EFLAGS.DF.  Only xDI moves; the
 * source register is untouched. */
#define IEM_STOS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(0, 2); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR,           uAddr); \
    IEM_MC_FETCH_GREG_U##ValBits(uValue, X86_GREG_xAX); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr,  X86_GREG_xDI); \
    IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP(); \
    IEM_MC_END(); \
5047
5048/**
5049 * @opcode 0xaa
5050 */
5051FNIEMOP_DEF(iemOp_stosb_Yb_AL)
5052{
5053 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5054
5055 /*
5056 * Use the C implementation if a repeat prefix is encountered.
5057 */
5058 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
5059 {
5060 IEMOP_MNEMONIC(rep_stos_Yb_al, "rep stos Yb,al");
5061 switch (pVCpu->iem.s.enmEffAddrMode)
5062 {
5063 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m16);
5064 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m32);
5065 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m64);
5066 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5067 }
5068 }
5069 IEMOP_MNEMONIC(stos_Yb_al, "stos Yb,al");
5070
5071 /*
5072 * Sharing case implementation with stos[wdq] below.
5073 */
5074 switch (pVCpu->iem.s.enmEffAddrMode)
5075 {
5076 case IEMMODE_16BIT: IEM_STOS_CASE(8, 16); break;
5077 case IEMMODE_32BIT: IEM_STOS_CASE(8, 32); break;
5078 case IEMMODE_64BIT: IEM_STOS_CASE(8, 64); break;
5079 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5080 }
5081 return VINF_SUCCESS;
5082}
5083
5084
5085/**
5086 * @opcode 0xab
5087 */
5088FNIEMOP_DEF(iemOp_stoswd_Yv_eAX)
5089{
5090 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5091
5092 /*
5093 * Use the C implementation if a repeat prefix is encountered.
5094 */
5095 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
5096 {
5097 IEMOP_MNEMONIC(rep_stos_Yv_rAX, "rep stos Yv,rAX");
5098 switch (pVCpu->iem.s.enmEffOpSize)
5099 {
5100 case IEMMODE_16BIT:
5101 switch (pVCpu->iem.s.enmEffAddrMode)
5102 {
5103 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m16);
5104 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m32);
5105 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m64);
5106 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5107 }
5108 break;
5109 case IEMMODE_32BIT:
5110 switch (pVCpu->iem.s.enmEffAddrMode)
5111 {
5112 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m16);
5113 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m32);
5114 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m64);
5115 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5116 }
5117 case IEMMODE_64BIT:
5118 switch (pVCpu->iem.s.enmEffAddrMode)
5119 {
5120 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_9);
5121 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m32);
5122 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m64);
5123 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5124 }
5125 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5126 }
5127 }
5128 IEMOP_MNEMONIC(stos_Yv_rAX, "stos Yv,rAX");
5129
5130 /*
5131 * Annoying double switch here.
5132 * Using ugly macro for implementing the cases, sharing it with stosb.
5133 */
5134 switch (pVCpu->iem.s.enmEffOpSize)
5135 {
5136 case IEMMODE_16BIT:
5137 switch (pVCpu->iem.s.enmEffAddrMode)
5138 {
5139 case IEMMODE_16BIT: IEM_STOS_CASE(16, 16); break;
5140 case IEMMODE_32BIT: IEM_STOS_CASE(16, 32); break;
5141 case IEMMODE_64BIT: IEM_STOS_CASE(16, 64); break;
5142 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5143 }
5144 break;
5145
5146 case IEMMODE_32BIT:
5147 switch (pVCpu->iem.s.enmEffAddrMode)
5148 {
5149 case IEMMODE_16BIT: IEM_STOS_CASE(32, 16); break;
5150 case IEMMODE_32BIT: IEM_STOS_CASE(32, 32); break;
5151 case IEMMODE_64BIT: IEM_STOS_CASE(32, 64); break;
5152 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5153 }
5154 break;
5155
5156 case IEMMODE_64BIT:
5157 switch (pVCpu->iem.s.enmEffAddrMode)
5158 {
5159 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
5160 case IEMMODE_32BIT: IEM_STOS_CASE(64, 32); break;
5161 case IEMMODE_64BIT: IEM_STOS_CASE(64, 64); break;
5162 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5163 }
5164 break;
5165 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5166 }
5167 return VINF_SUCCESS;
5168}
5169
5170#undef IEM_STOS_CASE
5171
/** Macro used by iemOp_lodsb_AL_Xb and iemOp_lodswd_eAX_Xv.
 *
 * Emits one non-repeated LODS step: loads ValBits bits from effSeg:xSI into
 * the low part of xAX, then advances/retreats xSI per EFLAGS.DF.  Only xSI
 * moves; xDI is not involved in LODS. */
#define IEM_LODS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(0, 2); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR,           uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
    IEM_MC_STORE_GREG_U##ValBits(X86_GREG_xAX, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP(); \
    IEM_MC_END();
5187
5188/**
5189 * @opcode 0xac
5190 */
5191FNIEMOP_DEF(iemOp_lodsb_AL_Xb)
5192{
5193 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5194
5195 /*
5196 * Use the C implementation if a repeat prefix is encountered.
5197 */
5198 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
5199 {
5200 IEMOP_MNEMONIC(rep_lodsb_AL_Xb, "rep lodsb AL,Xb");
5201 switch (pVCpu->iem.s.enmEffAddrMode)
5202 {
5203 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m16, pVCpu->iem.s.iEffSeg);
5204 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m32, pVCpu->iem.s.iEffSeg);
5205 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m64, pVCpu->iem.s.iEffSeg);
5206 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5207 }
5208 }
5209 IEMOP_MNEMONIC(lodsb_AL_Xb, "lodsb AL,Xb");
5210
5211 /*
5212 * Sharing case implementation with stos[wdq] below.
5213 */
5214 switch (pVCpu->iem.s.enmEffAddrMode)
5215 {
5216 case IEMMODE_16BIT: IEM_LODS_CASE(8, 16); break;
5217 case IEMMODE_32BIT: IEM_LODS_CASE(8, 32); break;
5218 case IEMMODE_64BIT: IEM_LODS_CASE(8, 64); break;
5219 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5220 }
5221 return VINF_SUCCESS;
5222}
5223
5224
5225/**
5226 * @opcode 0xad
5227 */
5228FNIEMOP_DEF(iemOp_lodswd_eAX_Xv)
5229{
5230 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5231
5232 /*
5233 * Use the C implementation if a repeat prefix is encountered.
5234 */
5235 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
5236 {
5237 IEMOP_MNEMONIC(rep_lods_rAX_Xv, "rep lods rAX,Xv");
5238 switch (pVCpu->iem.s.enmEffOpSize)
5239 {
5240 case IEMMODE_16BIT:
5241 switch (pVCpu->iem.s.enmEffAddrMode)
5242 {
5243 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m16, pVCpu->iem.s.iEffSeg);
5244 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m32, pVCpu->iem.s.iEffSeg);
5245 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m64, pVCpu->iem.s.iEffSeg);
5246 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5247 }
5248 break;
5249 case IEMMODE_32BIT:
5250 switch (pVCpu->iem.s.enmEffAddrMode)
5251 {
5252 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m16, pVCpu->iem.s.iEffSeg);
5253 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m32, pVCpu->iem.s.iEffSeg);
5254 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m64, pVCpu->iem.s.iEffSeg);
5255 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5256 }
5257 case IEMMODE_64BIT:
5258 switch (pVCpu->iem.s.enmEffAddrMode)
5259 {
5260 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_7);
5261 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m32, pVCpu->iem.s.iEffSeg);
5262 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m64, pVCpu->iem.s.iEffSeg);
5263 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5264 }
5265 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5266 }
5267 }
5268 IEMOP_MNEMONIC(lods_rAX_Xv, "lods rAX,Xv");
5269
5270 /*
5271 * Annoying double switch here.
5272 * Using ugly macro for implementing the cases, sharing it with lodsb.
5273 */
5274 switch (pVCpu->iem.s.enmEffOpSize)
5275 {
5276 case IEMMODE_16BIT:
5277 switch (pVCpu->iem.s.enmEffAddrMode)
5278 {
5279 case IEMMODE_16BIT: IEM_LODS_CASE(16, 16); break;
5280 case IEMMODE_32BIT: IEM_LODS_CASE(16, 32); break;
5281 case IEMMODE_64BIT: IEM_LODS_CASE(16, 64); break;
5282 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5283 }
5284 break;
5285
5286 case IEMMODE_32BIT:
5287 switch (pVCpu->iem.s.enmEffAddrMode)
5288 {
5289 case IEMMODE_16BIT: IEM_LODS_CASE(32, 16); break;
5290 case IEMMODE_32BIT: IEM_LODS_CASE(32, 32); break;
5291 case IEMMODE_64BIT: IEM_LODS_CASE(32, 64); break;
5292 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5293 }
5294 break;
5295
5296 case IEMMODE_64BIT:
5297 switch (pVCpu->iem.s.enmEffAddrMode)
5298 {
5299 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
5300 case IEMMODE_32BIT: IEM_LODS_CASE(64, 32); break;
5301 case IEMMODE_64BIT: IEM_LODS_CASE(64, 64); break;
5302 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5303 }
5304 break;
5305 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5306 }
5307 return VINF_SUCCESS;
5308}
5309
5310#undef IEM_LODS_CASE
5311
/** Macro used by iemOp_scasb_AL_Xb and iemOp_scaswd_eAX_Xv.
 *
 * Emits one non-repeated SCAS step: compares the low ValBits bits of xAX
 * against ES:xDI (via iemAImpl_cmp, which updates the arithmetic flags; xAX
 * itself is only read through the reference), then advances/retreats xDI per
 * EFLAGS.DF.  SCAS always uses ES and ignores segment overrides. */
#define IEM_SCAS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(3, 2); \
    IEM_MC_ARG(uint##ValBits##_t *, puRax,   0); \
    IEM_MC_ARG(uint##ValBits##_t,   uValue,  1); \
    IEM_MC_ARG(uint32_t *,          pEFlags, 2); \
    IEM_MC_LOCAL(RTGCPTR,           uAddr); \
    \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, X86_SREG_ES, uAddr); \
    IEM_MC_REF_GREG_U##ValBits(puRax, X86_GREG_xAX); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puRax, uValue, pEFlags); \
    \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP(); \
    IEM_MC_END();
5333
5334/**
5335 * @opcode 0xae
5336 */
5337FNIEMOP_DEF(iemOp_scasb_AL_Xb)
5338{
5339 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5340
5341 /*
5342 * Use the C implementation if a repeat prefix is encountered.
5343 */
5344 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
5345 {
5346 IEMOP_MNEMONIC(repe_scasb_AL_Xb, "repe scasb AL,Xb");
5347 switch (pVCpu->iem.s.enmEffAddrMode)
5348 {
5349 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m16);
5350 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m32);
5351 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m64);
5352 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5353 }
5354 }
5355 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
5356 {
5357 IEMOP_MNEMONIC(repone_scasb_AL_Xb, "repne scasb AL,Xb");
5358 switch (pVCpu->iem.s.enmEffAddrMode)
5359 {
5360 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m16);
5361 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m32);
5362 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m64);
5363 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5364 }
5365 }
5366 IEMOP_MNEMONIC(scasb_AL_Xb, "scasb AL,Xb");
5367
5368 /*
5369 * Sharing case implementation with stos[wdq] below.
5370 */
5371 switch (pVCpu->iem.s.enmEffAddrMode)
5372 {
5373 case IEMMODE_16BIT: IEM_SCAS_CASE(8, 16); break;
5374 case IEMMODE_32BIT: IEM_SCAS_CASE(8, 32); break;
5375 case IEMMODE_64BIT: IEM_SCAS_CASE(8, 64); break;
5376 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5377 }
5378 return VINF_SUCCESS;
5379}
5380
5381
5382/**
5383 * @opcode 0xaf
5384 */
5385FNIEMOP_DEF(iemOp_scaswd_eAX_Xv)
5386{
5387 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5388
5389 /*
5390 * Use the C implementation if a repeat prefix is encountered.
5391 */
5392 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
5393 {
5394 IEMOP_MNEMONIC(repe_scas_rAX_Xv, "repe scas rAX,Xv");
5395 switch (pVCpu->iem.s.enmEffOpSize)
5396 {
5397 case IEMMODE_16BIT:
5398 switch (pVCpu->iem.s.enmEffAddrMode)
5399 {
5400 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m16);
5401 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m32);
5402 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m64);
5403 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5404 }
5405 break;
5406 case IEMMODE_32BIT:
5407 switch (pVCpu->iem.s.enmEffAddrMode)
5408 {
5409 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m16);
5410 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m32);
5411 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m64);
5412 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5413 }
5414 case IEMMODE_64BIT:
5415 switch (pVCpu->iem.s.enmEffAddrMode)
5416 {
5417 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /** @todo It's this wrong, we can do 16-bit addressing in 64-bit mode, but not 32-bit. right? */
5418 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m32);
5419 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m64);
5420 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5421 }
5422 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5423 }
5424 }
5425 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
5426 {
5427 IEMOP_MNEMONIC(repne_scas_rAX_Xv, "repne scas rAX,Xv");
5428 switch (pVCpu->iem.s.enmEffOpSize)
5429 {
5430 case IEMMODE_16BIT:
5431 switch (pVCpu->iem.s.enmEffAddrMode)
5432 {
5433 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m16);
5434 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m32);
5435 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m64);
5436 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5437 }
5438 break;
5439 case IEMMODE_32BIT:
5440 switch (pVCpu->iem.s.enmEffAddrMode)
5441 {
5442 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m16);
5443 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m32);
5444 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m64);
5445 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5446 }
5447 case IEMMODE_64BIT:
5448 switch (pVCpu->iem.s.enmEffAddrMode)
5449 {
5450 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_5);
5451 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m32);
5452 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m64);
5453 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5454 }
5455 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5456 }
5457 }
5458 IEMOP_MNEMONIC(scas_rAX_Xv, "scas rAX,Xv");
5459
5460 /*
5461 * Annoying double switch here.
5462 * Using ugly macro for implementing the cases, sharing it with scasb.
5463 */
5464 switch (pVCpu->iem.s.enmEffOpSize)
5465 {
5466 case IEMMODE_16BIT:
5467 switch (pVCpu->iem.s.enmEffAddrMode)
5468 {
5469 case IEMMODE_16BIT: IEM_SCAS_CASE(16, 16); break;
5470 case IEMMODE_32BIT: IEM_SCAS_CASE(16, 32); break;
5471 case IEMMODE_64BIT: IEM_SCAS_CASE(16, 64); break;
5472 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5473 }
5474 break;
5475
5476 case IEMMODE_32BIT:
5477 switch (pVCpu->iem.s.enmEffAddrMode)
5478 {
5479 case IEMMODE_16BIT: IEM_SCAS_CASE(32, 16); break;
5480 case IEMMODE_32BIT: IEM_SCAS_CASE(32, 32); break;
5481 case IEMMODE_64BIT: IEM_SCAS_CASE(32, 64); break;
5482 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5483 }
5484 break;
5485
5486 case IEMMODE_64BIT:
5487 switch (pVCpu->iem.s.enmEffAddrMode)
5488 {
5489 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
5490 case IEMMODE_32BIT: IEM_SCAS_CASE(64, 32); break;
5491 case IEMMODE_64BIT: IEM_SCAS_CASE(64, 64); break;
5492 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5493 }
5494 break;
5495 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5496 }
5497 return VINF_SUCCESS;
5498}
5499
5500#undef IEM_SCAS_CASE
5501
5502/**
5503 * Common 'mov r8, imm8' helper.
5504 */
5505FNIEMOP_DEF_1(iemOpCommonMov_r8_Ib, uint8_t, iReg)
5506{
5507 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
5508 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5509
5510 IEM_MC_BEGIN(0, 1);
5511 IEM_MC_LOCAL_CONST(uint8_t, u8Value,/*=*/ u8Imm);
5512 IEM_MC_STORE_GREG_U8(iReg, u8Value);
5513 IEM_MC_ADVANCE_RIP();
5514 IEM_MC_END();
5515
5516 return VINF_SUCCESS;
5517}
5518
5519
5520/**
5521 * @opcode 0xb0
5522 */
5523FNIEMOP_DEF(iemOp_mov_AL_Ib)
5524{
5525 IEMOP_MNEMONIC(mov_AL_Ib, "mov AL,Ib");
5526 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xAX | pVCpu->iem.s.uRexB);
5527}
5528
5529
5530/**
5531 * @opcode 0xb1
5532 */
5533FNIEMOP_DEF(iemOp_CL_Ib)
5534{
5535 IEMOP_MNEMONIC(mov_CL_Ib, "mov CL,Ib");
5536 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xCX | pVCpu->iem.s.uRexB);
5537}
5538
5539
5540/**
5541 * @opcode 0xb2
5542 */
5543FNIEMOP_DEF(iemOp_DL_Ib)
5544{
5545 IEMOP_MNEMONIC(mov_DL_Ib, "mov DL,Ib");
5546 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDX | pVCpu->iem.s.uRexB);
5547}
5548
5549
5550/**
5551 * @opcode 0xb3
5552 */
5553FNIEMOP_DEF(iemOp_BL_Ib)
5554{
5555 IEMOP_MNEMONIC(mov_BL_Ib, "mov BL,Ib");
5556 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBX | pVCpu->iem.s.uRexB);
5557}
5558
5559
5560/**
5561 * @opcode 0xb4
5562 */
5563FNIEMOP_DEF(iemOp_mov_AH_Ib)
5564{
5565 IEMOP_MNEMONIC(mov_AH_Ib, "mov AH,Ib");
5566 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP | pVCpu->iem.s.uRexB);
5567}
5568
5569
5570/**
5571 * @opcode 0xb5
5572 */
5573FNIEMOP_DEF(iemOp_CH_Ib)
5574{
5575 IEMOP_MNEMONIC(mov_CH_Ib, "mov CH,Ib");
5576 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBP | pVCpu->iem.s.uRexB);
5577}
5578
5579
5580/**
5581 * @opcode 0xb6
5582 */
5583FNIEMOP_DEF(iemOp_DH_Ib)
5584{
5585 IEMOP_MNEMONIC(mov_DH_Ib, "mov DH,Ib");
5586 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSI | pVCpu->iem.s.uRexB);
5587}
5588
5589
5590/**
5591 * @opcode 0xb7
5592 */
5593FNIEMOP_DEF(iemOp_BH_Ib)
5594{
5595 IEMOP_MNEMONIC(mov_BH_Ib, "mov BH,Ib");
5596 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDI | pVCpu->iem.s.uRexB);
5597}
5598
5599
5600/**
5601 * Common 'mov regX,immX' helper.
5602 */
5603FNIEMOP_DEF_1(iemOpCommonMov_Rv_Iv, uint8_t, iReg)
5604{
5605 switch (pVCpu->iem.s.enmEffOpSize)
5606 {
5607 case IEMMODE_16BIT:
5608 {
5609 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
5610 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5611
5612 IEM_MC_BEGIN(0, 1);
5613 IEM_MC_LOCAL_CONST(uint16_t, u16Value,/*=*/ u16Imm);
5614 IEM_MC_STORE_GREG_U16(iReg, u16Value);
5615 IEM_MC_ADVANCE_RIP();
5616 IEM_MC_END();
5617 break;
5618 }
5619
5620 case IEMMODE_32BIT:
5621 {
5622 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
5623 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5624
5625 IEM_MC_BEGIN(0, 1);
5626 IEM_MC_LOCAL_CONST(uint32_t, u32Value,/*=*/ u32Imm);
5627 IEM_MC_STORE_GREG_U32(iReg, u32Value);
5628 IEM_MC_ADVANCE_RIP();
5629 IEM_MC_END();
5630 break;
5631 }
5632 case IEMMODE_64BIT:
5633 {
5634 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm); /* 64-bit immediate! */
5635 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5636
5637 IEM_MC_BEGIN(0, 1);
5638 IEM_MC_LOCAL_CONST(uint64_t, u64Value,/*=*/ u64Imm);
5639 IEM_MC_STORE_GREG_U64(iReg, u64Value);
5640 IEM_MC_ADVANCE_RIP();
5641 IEM_MC_END();
5642 break;
5643 }
5644 }
5645
5646 return VINF_SUCCESS;
5647}
5648
5649
5650/**
5651 * @opcode 0xb8
5652 */
5653FNIEMOP_DEF(iemOp_eAX_Iv)
5654{
5655 IEMOP_MNEMONIC(mov_rAX_IV, "mov rAX,IV");
5656 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xAX | pVCpu->iem.s.uRexB);
5657}
5658
5659
5660/**
5661 * @opcode 0xb9
5662 */
5663FNIEMOP_DEF(iemOp_eCX_Iv)
5664{
5665 IEMOP_MNEMONIC(mov_rCX_IV, "mov rCX,IV");
5666 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xCX | pVCpu->iem.s.uRexB);
5667}
5668
5669
5670/**
5671 * @opcode 0xba
5672 */
5673FNIEMOP_DEF(iemOp_eDX_Iv)
5674{
5675 IEMOP_MNEMONIC(mov_rDX_IV, "mov rDX,IV");
5676 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDX | pVCpu->iem.s.uRexB);
5677}
5678
5679
5680/**
5681 * @opcode 0xbb
5682 */
5683FNIEMOP_DEF(iemOp_eBX_Iv)
5684{
5685 IEMOP_MNEMONIC(mov_rBX_IV, "mov rBX,IV");
5686 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBX | pVCpu->iem.s.uRexB);
5687}
5688
5689
5690/**
5691 * @opcode 0xbc
5692 */
5693FNIEMOP_DEF(iemOp_eSP_Iv)
5694{
5695 IEMOP_MNEMONIC(mov_rSP_IV, "mov rSP,IV");
5696 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSP | pVCpu->iem.s.uRexB);
5697}
5698
5699
5700/**
5701 * @opcode 0xbd
5702 */
5703FNIEMOP_DEF(iemOp_eBP_Iv)
5704{
5705 IEMOP_MNEMONIC(mov_rBP_IV, "mov rBP,IV");
5706 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBP | pVCpu->iem.s.uRexB);
5707}
5708
5709
5710/**
5711 * @opcode 0xbe
5712 */
5713FNIEMOP_DEF(iemOp_eSI_Iv)
5714{
5715 IEMOP_MNEMONIC(mov_rSI_IV, "mov rSI,IV");
5716 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSI | pVCpu->iem.s.uRexB);
5717}
5718
5719
5720/**
5721 * @opcode 0xbf
5722 */
5723FNIEMOP_DEF(iemOp_eDI_Iv)
5724{
5725 IEMOP_MNEMONIC(mov_rDI_IV, "mov rDI,IV");
5726 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDI | pVCpu->iem.s.uRexB);
5727}
5728
5729
5730/**
5731 * @opcode 0xc0
5732 */
5733FNIEMOP_DEF(iemOp_Grp2_Eb_Ib)
5734{
5735 IEMOP_HLP_MIN_186();
5736 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5737 PCIEMOPSHIFTSIZES pImpl;
5738 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
5739 {
5740 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Eb_Ib, "rol Eb,Ib"); break;
5741 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Eb_Ib, "ror Eb,Ib"); break;
5742 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Eb_Ib, "rcl Eb,Ib"); break;
5743 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Eb_Ib, "rcr Eb,Ib"); break;
5744 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Eb_Ib, "shl Eb,Ib"); break;
5745 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Eb_Ib, "shr Eb,Ib"); break;
5746 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Eb_Ib, "sar Eb,Ib"); break;
5747 case 6: return IEMOP_RAISE_INVALID_OPCODE();
5748 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
5749 }
5750 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
5751
5752 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5753 {
5754 /* register */
5755 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5756 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5757 IEM_MC_BEGIN(3, 0);
5758 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
5759 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
5760 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5761 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5762 IEM_MC_REF_EFLAGS(pEFlags);
5763 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
5764 IEM_MC_ADVANCE_RIP();
5765 IEM_MC_END();
5766 }
5767 else
5768 {
5769 /* memory */
5770 IEM_MC_BEGIN(3, 2);
5771 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
5772 IEM_MC_ARG(uint8_t, cShiftArg, 1);
5773 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
5774 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5775
5776 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5777 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5778 IEM_MC_ASSIGN(cShiftArg, cShift);
5779 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5780 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
5781 IEM_MC_FETCH_EFLAGS(EFlags);
5782 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
5783
5784 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
5785 IEM_MC_COMMIT_EFLAGS(EFlags);
5786 IEM_MC_ADVANCE_RIP();
5787 IEM_MC_END();
5788 }
5789 return VINF_SUCCESS;
5790}
5791
5792
5793/**
5794 * @opcode 0xc1
5795 */
5796FNIEMOP_DEF(iemOp_Grp2_Ev_Ib)
5797{
5798 IEMOP_HLP_MIN_186();
5799 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5800 PCIEMOPSHIFTSIZES pImpl;
5801 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
5802 {
5803 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Ev_Ib, "rol Ev,Ib"); break;
5804 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Ev_Ib, "ror Ev,Ib"); break;
5805 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Ev_Ib, "rcl Ev,Ib"); break;
5806 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Ev_Ib, "rcr Ev,Ib"); break;
5807 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Ev_Ib, "shl Ev,Ib"); break;
5808 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Ev_Ib, "shr Ev,Ib"); break;
5809 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Ev_Ib, "sar Ev,Ib"); break;
5810 case 6: return IEMOP_RAISE_INVALID_OPCODE();
5811 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
5812 }
5813 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
5814
5815 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5816 {
5817 /* register */
5818 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5819 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5820 switch (pVCpu->iem.s.enmEffOpSize)
5821 {
5822 case IEMMODE_16BIT:
5823 IEM_MC_BEGIN(3, 0);
5824 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5825 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
5826 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5827 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5828 IEM_MC_REF_EFLAGS(pEFlags);
5829 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
5830 IEM_MC_ADVANCE_RIP();
5831 IEM_MC_END();
5832 return VINF_SUCCESS;
5833
5834 case IEMMODE_32BIT:
5835 IEM_MC_BEGIN(3, 0);
5836 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5837 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
5838 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5839 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5840 IEM_MC_REF_EFLAGS(pEFlags);
5841 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
5842 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5843 IEM_MC_ADVANCE_RIP();
5844 IEM_MC_END();
5845 return VINF_SUCCESS;
5846
5847 case IEMMODE_64BIT:
5848 IEM_MC_BEGIN(3, 0);
5849 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5850 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
5851 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5852 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5853 IEM_MC_REF_EFLAGS(pEFlags);
5854 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
5855 IEM_MC_ADVANCE_RIP();
5856 IEM_MC_END();
5857 return VINF_SUCCESS;
5858
5859 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5860 }
5861 }
5862 else
5863 {
5864 /* memory */
5865 switch (pVCpu->iem.s.enmEffOpSize)
5866 {
5867 case IEMMODE_16BIT:
5868 IEM_MC_BEGIN(3, 2);
5869 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5870 IEM_MC_ARG(uint8_t, cShiftArg, 1);
5871 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
5872 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5873
5874 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5875 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5876 IEM_MC_ASSIGN(cShiftArg, cShift);
5877 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5878 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
5879 IEM_MC_FETCH_EFLAGS(EFlags);
5880 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
5881
5882 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
5883 IEM_MC_COMMIT_EFLAGS(EFlags);
5884 IEM_MC_ADVANCE_RIP();
5885 IEM_MC_END();
5886 return VINF_SUCCESS;
5887
5888 case IEMMODE_32BIT:
5889 IEM_MC_BEGIN(3, 2);
5890 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5891 IEM_MC_ARG(uint8_t, cShiftArg, 1);
5892 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
5893 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5894
5895 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5896 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5897 IEM_MC_ASSIGN(cShiftArg, cShift);
5898 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5899 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
5900 IEM_MC_FETCH_EFLAGS(EFlags);
5901 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
5902
5903 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
5904 IEM_MC_COMMIT_EFLAGS(EFlags);
5905 IEM_MC_ADVANCE_RIP();
5906 IEM_MC_END();
5907 return VINF_SUCCESS;
5908
5909 case IEMMODE_64BIT:
5910 IEM_MC_BEGIN(3, 2);
5911 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5912 IEM_MC_ARG(uint8_t, cShiftArg, 1);
5913 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
5914 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5915
5916 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5917 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5918 IEM_MC_ASSIGN(cShiftArg, cShift);
5919 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5920 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
5921 IEM_MC_FETCH_EFLAGS(EFlags);
5922 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
5923
5924 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
5925 IEM_MC_COMMIT_EFLAGS(EFlags);
5926 IEM_MC_ADVANCE_RIP();
5927 IEM_MC_END();
5928 return VINF_SUCCESS;
5929
5930 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5931 }
5932 }
5933}
5934
5935
5936/**
5937 * @opcode 0xc2
5938 */
5939FNIEMOP_DEF(iemOp_retn_Iw)
5940{
5941 IEMOP_MNEMONIC(retn_Iw, "retn Iw");
5942 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
5943 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5944 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5945 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pVCpu->iem.s.enmEffOpSize, u16Imm);
5946}
5947
5948
5949/**
5950 * @opcode 0xc3
5951 */
5952FNIEMOP_DEF(iemOp_retn)
5953{
5954 IEMOP_MNEMONIC(retn, "retn");
5955 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5956 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5957 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pVCpu->iem.s.enmEffOpSize, 0);
5958}
5959
5960
5961/**
5962 * @opcode 0xc4
5963 */
5964FNIEMOP_DEF(iemOp_les_Gv_Mp__vex2)
5965{
5966 /* The LES instruction is invalid 64-bit mode. In legacy and
5967 compatability mode it is invalid with MOD=3.
5968 The use as a VEX prefix is made possible by assigning the inverted
5969 REX.R to the top MOD bit, and the top bit in the inverted register
5970 specifier to the bottom MOD bit, thereby effectively limiting 32-bit
5971 to accessing registers 0..7 in this VEX form. */
5972 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5973 if ( pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
5974 || (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5975 {
5976 IEMOP_MNEMONIC(vex2_prefix, "vex2");
5977 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx)
5978 {
5979 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
5980 if ( ( pVCpu->iem.s.fPrefixes
5981 & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_LOCK | IEM_OP_PRF_REX))
5982 == 0)
5983 {
5984 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
5985 pVCpu->iem.s.uRexReg = ~bRm >> (7 - 3);
5986 pVCpu->iem.s.uVex3rdReg = (~bRm >> 3) & 0xf;
5987 pVCpu->iem.s.uVexLength = (bRm >> 2) & 1;
5988 pVCpu->iem.s.idxPrefix = bRm & 0x3;
5989
5990 return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
5991 }
5992
5993 Log(("VEX2: Invalid prefix mix!\n"));
5994 }
5995 else
5996 Log(("VEX2: AVX support disabled!\n"));
5997
5998 /* @todo does intel completely decode the sequence with SIB/disp before \#UD? */
5999 return IEMOP_RAISE_INVALID_OPCODE();
6000 }
6001 IEMOP_MNEMONIC(les_Gv_Mp, "les Gv,Mp");
6002 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_ES, bRm);
6003}
6004
6005
6006/**
6007 * @opcode 0xc5
6008 */
6009FNIEMOP_DEF(iemOp_lds_Gv_Mp__vex3)
6010{
6011 /* The LDS instruction is invalid 64-bit mode. In legacy and
6012 compatability mode it is invalid with MOD=3.
6013 The use as a VEX prefix is made possible by assigning the inverted
6014 REX.R and REX.X to the two MOD bits, since the REX bits are ignored
6015 outside of 64-bit mode. VEX is not available in real or v86 mode. */
6016 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6017 if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
6018 {
6019 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
6020 {
6021 IEMOP_MNEMONIC(lds_Gv_Mp, "lds Gv,Mp");
6022 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_DS, bRm);
6023 }
6024 IEMOP_HLP_NO_REAL_OR_V86_MODE();
6025 }
6026
6027 IEMOP_MNEMONIC(vex3_prefix, "vex3");
6028 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx)
6029 {
6030 /** @todo Test when exctly the VEX conformance checks kick in during
6031 * instruction decoding and fetching (using \#PF). */
6032 uint8_t bVex2; IEM_OPCODE_GET_NEXT_U8(&bVex2);
6033 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
6034 if ( ( pVCpu->iem.s.fPrefixes
6035 & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_LOCK | IEM_OP_PRF_REX))
6036 == 0)
6037 {
6038 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
6039 if (bVex2 & 0x80 /* VEX.W */)
6040 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
6041 pVCpu->iem.s.uRexReg = ~bRm >> (7 - 3);
6042 pVCpu->iem.s.uRexIndex = ~bRm >> (6 - 3);
6043 pVCpu->iem.s.uRexB = ~bRm >> (5 - 3);
6044 pVCpu->iem.s.uVex3rdReg = (~bVex2 >> 3) & 0xf;
6045 pVCpu->iem.s.uVexLength = (bVex2 >> 2) & 1;
6046 pVCpu->iem.s.idxPrefix = bVex2 & 0x3;
6047
6048 switch (bRm & 0x1f)
6049 {
6050 case 1: /* 0x0f lead opcode byte. */
6051 return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
6052
6053 case 2: /* 0x0f 0x38 lead opcode bytes. */
6054 /** @todo VEX: Just use new tables and decoders. */
6055 IEMOP_BITCH_ABOUT_STUB();
6056 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
6057
6058 case 3: /* 0x0f 0x3a lead opcode bytes. */
6059 /** @todo VEX: Just use new tables and decoders. */
6060 IEMOP_BITCH_ABOUT_STUB();
6061 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
6062
6063 default:
6064 Log(("VEX3: Invalid vvvv value: %#x!\n", bRm & 0x1f));
6065 return IEMOP_RAISE_INVALID_OPCODE();
6066 }
6067 }
6068 else
6069 Log(("VEX3: Invalid prefix mix!\n"));
6070 }
6071 else
6072 Log(("VEX3: AVX support disabled!\n"));
6073 return IEMOP_RAISE_INVALID_OPCODE();
6074}
6075
6076
6077/**
6078 * @opcode 0xc6
6079 */
6080FNIEMOP_DEF(iemOp_Grp11_Eb_Ib)
6081{
6082 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6083 if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
6084 return IEMOP_RAISE_INVALID_OPCODE();
6085 IEMOP_MNEMONIC(mov_Eb_Ib, "mov Eb,Ib");
6086
6087 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6088 {
6089 /* register access */
6090 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
6091 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6092 IEM_MC_BEGIN(0, 0);
6093 IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u8Imm);
6094 IEM_MC_ADVANCE_RIP();
6095 IEM_MC_END();
6096 }
6097 else
6098 {
6099 /* memory access. */
6100 IEM_MC_BEGIN(0, 1);
6101 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6102 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6103 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
6104 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6105 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Imm);
6106 IEM_MC_ADVANCE_RIP();
6107 IEM_MC_END();
6108 }
6109 return VINF_SUCCESS;
6110}
6111
6112
6113/**
6114 * @opcode 0xc7
6115 */
6116FNIEMOP_DEF(iemOp_Grp11_Ev_Iz)
6117{
6118 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6119 if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
6120 return IEMOP_RAISE_INVALID_OPCODE();
6121 IEMOP_MNEMONIC(mov_Ev_Iz, "mov Ev,Iz");
6122
6123 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6124 {
6125 /* register access */
6126 switch (pVCpu->iem.s.enmEffOpSize)
6127 {
6128 case IEMMODE_16BIT:
6129 IEM_MC_BEGIN(0, 0);
6130 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
6131 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6132 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Imm);
6133 IEM_MC_ADVANCE_RIP();
6134 IEM_MC_END();
6135 return VINF_SUCCESS;
6136
6137 case IEMMODE_32BIT:
6138 IEM_MC_BEGIN(0, 0);
6139 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
6140 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6141 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Imm);
6142 IEM_MC_ADVANCE_RIP();
6143 IEM_MC_END();
6144 return VINF_SUCCESS;
6145
6146 case IEMMODE_64BIT:
6147 IEM_MC_BEGIN(0, 0);
6148 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
6149 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6150 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Imm);
6151 IEM_MC_ADVANCE_RIP();
6152 IEM_MC_END();
6153 return VINF_SUCCESS;
6154
6155 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6156 }
6157 }
6158 else
6159 {
6160 /* memory access. */
6161 switch (pVCpu->iem.s.enmEffOpSize)
6162 {
6163 case IEMMODE_16BIT:
6164 IEM_MC_BEGIN(0, 1);
6165 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6166 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
6167 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
6168 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6169 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Imm);
6170 IEM_MC_ADVANCE_RIP();
6171 IEM_MC_END();
6172 return VINF_SUCCESS;
6173
6174 case IEMMODE_32BIT:
6175 IEM_MC_BEGIN(0, 1);
6176 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6177 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
6178 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
6179 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6180 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Imm);
6181 IEM_MC_ADVANCE_RIP();
6182 IEM_MC_END();
6183 return VINF_SUCCESS;
6184
6185 case IEMMODE_64BIT:
6186 IEM_MC_BEGIN(0, 1);
6187 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6188 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
6189 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
6190 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6191 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Imm);
6192 IEM_MC_ADVANCE_RIP();
6193 IEM_MC_END();
6194 return VINF_SUCCESS;
6195
6196 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6197 }
6198 }
6199}
6200
6201
6202
6203
6204/**
6205 * @opcode 0xc8
6206 */
6207FNIEMOP_DEF(iemOp_enter_Iw_Ib)
6208{
6209 IEMOP_MNEMONIC(enter_Iw_Ib, "enter Iw,Ib");
6210 IEMOP_HLP_MIN_186();
6211 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6212 uint16_t cbFrame; IEM_OPCODE_GET_NEXT_U16(&cbFrame);
6213 uint8_t u8NestingLevel; IEM_OPCODE_GET_NEXT_U8(&u8NestingLevel);
6214 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6215 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_enter, pVCpu->iem.s.enmEffOpSize, cbFrame, u8NestingLevel);
6216}
6217
6218
6219/**
6220 * @opcode 0xc9
6221 */
6222FNIEMOP_DEF(iemOp_leave)
6223{
6224 IEMOP_MNEMONIC(leave, "leave");
6225 IEMOP_HLP_MIN_186();
6226 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6227 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6228 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_leave, pVCpu->iem.s.enmEffOpSize);
6229}
6230
6231
6232/**
6233 * @opcode 0xca
6234 */
6235FNIEMOP_DEF(iemOp_retf_Iw)
6236{
6237 IEMOP_MNEMONIC(retf_Iw, "retf Iw");
6238 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
6239 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6240 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6241 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, u16Imm);
6242}
6243
6244
6245/**
6246 * @opcode 0xcb
6247 */
6248FNIEMOP_DEF(iemOp_retf)
6249{
6250 IEMOP_MNEMONIC(retf, "retf");
6251 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6252 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6253 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, 0);
6254}
6255
6256
6257/**
6258 * @opcode 0xcc
6259 */
6260FNIEMOP_DEF(iemOp_int3)
6261{
6262 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6263 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_BP, true /*fIsBpInstr*/);
6264}
6265
6266
6267/**
6268 * @opcode 0xcd
6269 */
6270FNIEMOP_DEF(iemOp_int_Ib)
6271{
6272 uint8_t u8Int; IEM_OPCODE_GET_NEXT_U8(&u8Int);
6273 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6274 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, u8Int, false /*fIsBpInstr*/);
6275}
6276
6277
6278/**
6279 * @opcode 0xce
6280 */
6281FNIEMOP_DEF(iemOp_into)
6282{
6283 IEMOP_MNEMONIC(into, "into");
6284 IEMOP_HLP_NO_64BIT();
6285
6286 IEM_MC_BEGIN(2, 0);
6287 IEM_MC_ARG_CONST(uint8_t, u8Int, /*=*/ X86_XCPT_OF, 0);
6288 IEM_MC_ARG_CONST(bool, fIsBpInstr, /*=*/ false, 1);
6289 IEM_MC_CALL_CIMPL_2(iemCImpl_int, u8Int, fIsBpInstr);
6290 IEM_MC_END();
6291 return VINF_SUCCESS;
6292}
6293
6294
6295/**
6296 * @opcode 0xcf
6297 */
6298FNIEMOP_DEF(iemOp_iret)
6299{
6300 IEMOP_MNEMONIC(iret, "iret");
6301 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6302 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_iret, pVCpu->iem.s.enmEffOpSize);
6303}
6304
6305
6306/**
6307 * @opcode 0xd0
6308 */
6309FNIEMOP_DEF(iemOp_Grp2_Eb_1)
6310{
6311 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6312 PCIEMOPSHIFTSIZES pImpl;
6313 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
6314 {
6315 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Eb_1, "rol Eb,1"); break;
6316 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Eb_1, "ror Eb,1"); break;
6317 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Eb_1, "rcl Eb,1"); break;
6318 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Eb_1, "rcr Eb,1"); break;
6319 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Eb_1, "shl Eb,1"); break;
6320 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Eb_1, "shr Eb,1"); break;
6321 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Eb_1, "sar Eb,1"); break;
6322 case 6: return IEMOP_RAISE_INVALID_OPCODE();
6323 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
6324 }
6325 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
6326
6327 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6328 {
6329 /* register */
6330 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6331 IEM_MC_BEGIN(3, 0);
6332 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6333 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1);
6334 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6335 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6336 IEM_MC_REF_EFLAGS(pEFlags);
6337 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
6338 IEM_MC_ADVANCE_RIP();
6339 IEM_MC_END();
6340 }
6341 else
6342 {
6343 /* memory */
6344 IEM_MC_BEGIN(3, 2);
6345 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6346 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1);
6347 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6348 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6349
6350 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6351 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6352 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6353 IEM_MC_FETCH_EFLAGS(EFlags);
6354 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
6355
6356 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
6357 IEM_MC_COMMIT_EFLAGS(EFlags);
6358 IEM_MC_ADVANCE_RIP();
6359 IEM_MC_END();
6360 }
6361 return VINF_SUCCESS;
6362}
6363
6364
6365
/**
 * @opcode 0xd1
 * Group 2 word/dword/qword shift/rotate with an implicit count of 1:
 * rol/ror/rcl/rcr/shl/shr/sar Ev,1.  The ModR/M reg field selects the
 * operation; /6 is an invalid encoding.  Dispatches on the effective
 * operand size (16/32/64-bit) for both register and memory forms.
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    /* Pick the implementation from the reg field of the ModR/M byte. */
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Ev_1, "rol Ev,1"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Ev_1, "ror Ev,1"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Ev_1, "rcl Ev,1"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Ev_1, "rcr Ev,1"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Ev_1, "shl Ev,1"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Ev_1, "shr Ev,1"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Ev_1, "sar Ev,1"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
    /* OF and AF may be left undefined by the hardware for these operations. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit register writes zero the upper half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory: map the destination read-write and commit afterwards. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6499
6500
/**
 * @opcode 0xd2
 * Group 2 byte shift/rotate with the count taken from CL:
 * rol/ror/rcl/rcr/shl/shr/sar Eb,CL.  The ModR/M reg field selects the
 * operation; /6 is an invalid encoding.
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    /* Pick the implementation from the reg field of the ModR/M byte. */
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Eb_CL, "rol Eb,CL"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Eb_CL, "ror Eb,CL"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Eb_CL, "rcl Eb,CL"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Eb_CL, "rcr Eb,CL"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Eb_CL, "shl Eb,CL"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Eb_CL, "shr Eb,CL"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Eb_CL, "sar Eb,CL"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc, grr. */
    }
    /* OF and AF may be left undefined by the hardware for these operations. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register; shift count fetched from CL. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, cShiftArg, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory; shift count fetched from CL, destination mapped read-write. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, cShiftArg, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6560
6561
/**
 * @opcode 0xd3
 * Group 2 word/dword/qword shift/rotate with the count taken from CL:
 * rol/ror/rcl/rcr/shl/shr/sar Ev,CL.  The ModR/M reg field selects the
 * operation; /6 is an invalid encoding.  Dispatches on the effective
 * operand size (16/32/64-bit) for both register and memory forms.
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    /* Pick the implementation from the reg field of the ModR/M byte. */
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Ev_CL, "rol Ev,CL"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Ev_CL, "ror Ev,CL"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Ev_CL, "rcl Ev,CL"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Ev_CL, "rcr Ev,CL"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Ev_CL, "shl Ev,CL"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Ev_CL, "shr Ev,CL"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Ev_CL, "sar Ev,CL"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    /* OF and AF may be left undefined by the hardware for these operations. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register; shift count fetched from CL. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit register writes zero the upper half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory; shift count fetched from CL, destination mapped read-write. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6701
/**
 * @opcode 0xd4
 * ASCII adjust AX after multiply; the immediate byte is the divisor
 * (normally 10).  A zero divisor raises \#DE before deferring to
 * iemCImpl_aam.  Invalid in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_aam_Ib)
{
    IEMOP_MNEMONIC(aam_Ib, "aam Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    if (!bImm)
        return IEMOP_RAISE_DIVIDE_ERROR();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aam, bImm);
}
6715
6716
/**
 * @opcode 0xd5
 * ASCII adjust AX before division; the immediate byte is the multiplier
 * (normally 10).  Unlike AAM, a zero immediate is legal here.  Defers to
 * iemCImpl_aad.  Invalid in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_aad_Ib)
{
    IEMOP_MNEMONIC(aad_Ib, "aad Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aad, bImm);
}
6728
6729
6730/**
6731 * @opcode 0xd6
6732 */
6733FNIEMOP_DEF(iemOp_salc)
6734{
6735 IEMOP_MNEMONIC(salc, "salc");
6736 IEMOP_HLP_MIN_286(); /* (undocument at the time) */
6737 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6738 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6739 IEMOP_HLP_NO_64BIT();
6740
6741 IEM_MC_BEGIN(0, 0);
6742 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
6743 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0xff);
6744 } IEM_MC_ELSE() {
6745 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0x00);
6746 } IEM_MC_ENDIF();
6747 IEM_MC_ADVANCE_RIP();
6748 IEM_MC_END();
6749 return VINF_SUCCESS;
6750}
6751
6752
/**
 * @opcode 0xd7
 * xlat - table lookup translation: AL = [seg:(r/e)BX + zero-extended AL].
 * Dispatches on the effective address size; default segment is DS,
 * overridable via iEffSeg.
 */
FNIEMOP_DEF(iemOp_xlat)
{
    IEMOP_MNEMONIC(xlat, "xlat");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t, u8Tmp);
            IEM_MC_LOCAL(uint16_t, u16Addr);
            /* Address = BX + zero-extended AL, truncated to 16 bits. */
            IEM_MC_FETCH_GREG_U8_ZX_U16(u16Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U16_TO_LOCAL(u16Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM16_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u16Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t, u8Tmp);
            IEM_MC_LOCAL(uint32_t, u32Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U32(u32Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U32_TO_LOCAL(u32Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM32_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u32Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t, u8Tmp);
            IEM_MC_LOCAL(uint64_t, u64Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U64(u64Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U64_TO_LOCAL(u64Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u64Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6801
6802
/**
 * Common worker for FPU instructions working on ST0 and STn, and storing the
 * result in ST0.  Raises \#NM / pending FPU exceptions first; if either
 * register is empty the stack-underflow path is taken instead of calling the
 * arithmetic implementation.
 *
 * @param bRm The ModR/M byte; the rm field selects STn.
 * @param pfnAImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
6833
6834
/**
 * Common worker for FPU instructions working on ST0 and STn, and only
 * affecting flags (FSW); no register is written.  Underflow path is taken
 * when either register is empty.
 *
 * @param bRm The ModR/M byte; the rm field selects STn.
 * @param pfnAImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW(u16Fsw);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX); /* UINT8_MAX = no destination register. */
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
6865
6866
/**
 * Common worker for FPU instructions working on ST0 and STn, only affecting
 * flags (FSW), and popping the register stack when done.  Underflow path is
 * taken (also popping) when either register is empty.
 *
 * @param bRm The ModR/M byte; the rm field selects STn.
 * @param pfnAImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(UINT8_MAX); /* UINT8_MAX = no destination register. */
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
6897
6898
/** Opcode 0xd8 11/0 - fadd st0,stN: wrapper around the common ST0/STn worker. */
FNIEMOP_DEF_1(iemOp_fadd_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_stN, "fadd st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fadd_r80_by_r80);
}
6905
6906
/** Opcode 0xd8 11/1 - fmul st0,stN: wrapper around the common ST0/STn worker. */
FNIEMOP_DEF_1(iemOp_fmul_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_stN, "fmul st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fmul_r80_by_r80);
}
6913
6914
/** Opcode 0xd8 11/2 - fcom st0,stN: compares only, updates FSW, no store. */
FNIEMOP_DEF_1(iemOp_fcom_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_stN, "fcom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fcom_r80_by_r80);
}
6921
6922
/** Opcode 0xd8 11/3 - fcomp st0,stN: same compare as fcom, then pops ST0. */
FNIEMOP_DEF_1(iemOp_fcomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_stN, "fcomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fcom_r80_by_r80);
}
6929
6930
/** Opcode 0xd8 11/4 - fsub st0,stN: wrapper around the common ST0/STn worker. */
FNIEMOP_DEF_1(iemOp_fsub_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_stN, "fsub st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsub_r80_by_r80);
}
6937
6938
/** Opcode 0xd8 11/5 - fsubr st0,stN (reversed operands): common worker wrapper. */
FNIEMOP_DEF_1(iemOp_fsubr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_stN, "fsubr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsubr_r80_by_r80);
}
6945
6946
/** Opcode 0xd8 11/6 - fdiv st0,stN: wrapper around the common ST0/STn worker. */
FNIEMOP_DEF_1(iemOp_fdiv_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_stN, "fdiv st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdiv_r80_by_r80);
}
6953
6954
/** Opcode 0xd8 11/7 - fdivr st0,stN (reversed operands): common worker wrapper. */
FNIEMOP_DEF_1(iemOp_fdivr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_stN, "fdivr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdivr_r80_by_r80);
}
6961
6962
/**
 * Common worker for FPU instructions working on ST0 and an m32r (32-bit real
 * from memory), storing the result in ST0.  The memory operand is fetched
 * before touching the FPU stack; an empty ST0 takes the underflow path.
 *
 * @param bRm The ModR/M byte; encodes the memory operand.
 * @param pfnAImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32r, uint8_t, bRm, PFNIEMAIMPLFPUR32, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr32Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
6998
6999
/** Opcode 0xd8 !11/0 - fadd st0,m32r: wrapper around the common ST0/m32r worker. */
FNIEMOP_DEF_1(iemOp_fadd_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_m32r, "fadd st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fadd_r80_by_r32);
}
7006
7007
/** Opcode 0xd8 !11/1 - fmul st0,m32r: wrapper around the common ST0/m32r worker. */
FNIEMOP_DEF_1(iemOp_fmul_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_m32r, "fmul st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fmul_r80_by_r32);
}
7014
7015
/** Opcode 0xd8 !11/2 - fcom st0,m32r: compares ST0 against a 32-bit real from
 * memory, updating only FSW; no register is written.  Open-coded (not using
 * the common worker) because it writes FSW rather than an IEMFPURESULT. */
FNIEMOP_DEF_1(iemOp_fcom_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m32r, "fcom st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7048
7049
/** Opcode 0xd8 !11/3 - fcomp st0,m32r: same compare as fcom m32r, then pops
 * the register stack (the _THEN_POP FSW/underflow variants). */
FNIEMOP_DEF_1(iemOp_fcomp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m32r, "fcomp st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7082
7083
/** Opcode 0xd8 !11/4 - fsub st0,m32r: wrapper around the common ST0/m32r worker. */
FNIEMOP_DEF_1(iemOp_fsub_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_m32r, "fsub st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsub_r80_by_r32);
}
7090
7091
/** Opcode 0xd8 !11/5 - fsubr st0,m32r (reversed operands): common worker wrapper. */
FNIEMOP_DEF_1(iemOp_fsubr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_m32r, "fsubr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsubr_r80_by_r32);
}
7098
7099
/** Opcode 0xd8 !11/6 - fdiv st0,m32r: wrapper around the common ST0/m32r worker. */
FNIEMOP_DEF_1(iemOp_fdiv_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_m32r, "fdiv st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdiv_r80_by_r32);
}
7106
7107
/** Opcode 0xd8 !11/7 - fdivr st0,m32r (reversed operands): common worker wrapper. */
FNIEMOP_DEF_1(iemOp_fdivr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_m32r, "fdivr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdivr_r80_by_r32);
}
7114
7115
/**
 * @opcode 0xd8
 * FPU escape 0xd8: records the FPU opcode word, then dispatches on the
 * ModR/M mod field — mod==3 selects the register (ST0,STn) forms, anything
 * else the memory (ST0,m32r) forms — using the reg field as sub-opcode.
 */
FNIEMOP_DEF(iemOp_EscF0)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Remember the FPU opcode (low 3 bits of the escape byte + ModR/M). */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd8 & 0x7);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_stN,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_stN,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_stN, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m32r,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m32r,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m32r,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m32r,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m32r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m32r,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m32r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
7155
7156
/** Opcode 0xd9 /0 mem32real - fld m32r: push a 32-bit real from memory onto
 * the FPU stack (converted to 80-bit).  If the incoming top-of-stack slot
 * (ST7 after push) is not free, the push-overflow path is taken instead.
 * @sa iemOp_fld_m64r */
FNIEMOP_DEF_1(iemOp_fld_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m32r, "fld m32r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val, r32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7) /* The register that becomes ST0 after the push. */
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r32_to_r80, pFpuRes, pr32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7189
7190
/** Opcode 0xd9 !11/2 mem32real - fst m32r: store ST0 to memory as a 32-bit
 * real without popping.  On stack underflow, a negative QNaN is written
 * instead when the invalid-operation exception is masked (FCW.IM). */
FNIEMOP_DEF_1(iemOp_fst_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m32r, "fst m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing before looking at the FPU stack. */
    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM() /* Masked invalid-op: store default QNaN. */
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7225
7226
/** Opcode 0xd9 !11/3 - fstp m32r: same as fst m32r but pops the register
 * stack afterwards (the _THEN_POP FSW/underflow variants). */
FNIEMOP_DEF_1(iemOp_fstp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m32r, "fstp m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing before looking at the FPU stack. */
    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM() /* Masked invalid-op: store default QNaN. */
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7261
7262
/** Opcode 0xd9 !11/4 - fldenv m14/28byte: load the FPU environment from
 * memory; operand size selects the 14-byte (16-bit) vs 28-byte (32-bit)
 * layout.  Defers to iemCImpl_fldenv. */
FNIEMOP_DEF_1(iemOp_fldenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fldenv, "fldenv m14/28byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fldenv, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
7280
7281
7282/** Opcode 0xd9 !11/5 */
7283FNIEMOP_DEF_1(iemOp_fldcw, uint8_t, bRm)
7284{
7285 IEMOP_MNEMONIC(fldcw_m2byte, "fldcw m2byte");
7286 IEM_MC_BEGIN(1, 1);
7287 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7288 IEM_MC_ARG(uint16_t, u16Fsw, 0);
7289 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7290 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7291 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7292 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7293 IEM_MC_FETCH_MEM_U16(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
7294 IEM_MC_CALL_CIMPL_1(iemCImpl_fldcw, u16Fsw);
7295 IEM_MC_END();
7296 return VINF_SUCCESS;
7297}
7298
7299
/** Opcode 0xd9 !11/6 - FNSTENV m14/28byte: store the FPU environment to
 *  memory without checking for pending FPU exceptions first (no-wait form). */
FNIEMOP_DEF_1(iemOp_fnstenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstenv, "fstenv m14/m28byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    /* Note! No IEM_MC_MAYBE_RAISE_FPU_XCPT here - the no-wait form must work
       with pending unmasked exceptions. Read-only actualization suffices. */
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fnstenv, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}
7317
7318
/** Opcode 0xd9 !11/7 - FNSTCW m2byte: store the FPU control word to memory
 *  (no-wait form, so no pending-exception check). */
FNIEMOP_DEF_1(iemOp_fnstcw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstcw_m2byte, "fnstcw m2byte");
    IEM_MC_BEGIN(2, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fcw);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FCW(u16Fcw);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Fcw);
    IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
    return VINF_SUCCESS;
}
7336
7337
/** Opcode 0xd9 0xd0, 0xd9 0xd8-0xdf, ++?. - FNOP: FPU no-operation, but it
 *  still updates the FPU instruction pointer / opcode like other x87 insns. */
FNIEMOP_DEF(iemOp_fnop)
{
    IEMOP_MNEMONIC(fnop, "fnop");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /** @todo Testcase: looks like FNOP leaves FOP alone but updates FPUIP. Could be
     *        intel optimizations. Investigate. */
    IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
    return VINF_SUCCESS;
}
7355
7356
/** Opcode 0xd9 11/0 stN - FLD ST(i): push a copy of ST(i) onto the stack. */
FNIEMOP_DEF_1(iemOp_fld_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_stN, "fld stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF?  Intel mentioned it not. AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Source register is taken from the ModR/M r/m field (relative to TOP). */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, bRm & X86_MODRM_RM_MASK)
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_PUSH_FPU_RESULT(FpuRes);
    IEM_MC_ELSE()
        /* Loading from an empty register is a stack underflow on the push. */
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW();
    IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
7384
7385
/** Opcode 0xd9 11/3 stN - FXCH ST(i): exchange ST(0) and ST(i). */
FNIEMOP_DEF_1(iemOp_fxch_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fxch_stN, "fxch stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF?  Intel mentioned it not. AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(1, 3);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value2);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_CONST(uint8_t, iStReg, /*=*/ bRm & X86_MODRM_RM_MASK, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        /* Swap: old ST(i) goes to ST(0) via the result (with C1 set), old
           ST(0) is written directly into ST(i). */
        IEM_MC_SET_FPU_RESULT(FpuRes, X86_FSW_C1, pr80Value2);
        IEM_MC_STORE_FPUREG_R80_SRC_REF(bRm & X86_MODRM_RM_MASK, pr80Value1);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        /* One or both registers empty: the underflow handling is complex
           enough to warrant a C implementation. */
        IEM_MC_CALL_CIMPL_1(iemCImpl_fxch_underflow, iStReg);
    IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
7416
7417
/** Opcode 0xd9 11/4, 0xdd 11/2. - FSTP ST(i): copy ST(0) to ST(i) and pop. */
FNIEMOP_DEF_1(iemOp_fstp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_st0_stN, "fstp st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* fstp st0, st0 is frequently used as an official 'ffreep st0' sequence. */
    uint8_t const iDstReg = bRm & X86_MODRM_RM_MASK;
    if (!iDstReg)
    {
        /* Destination is ST(0) itself: no copy needed, just pop (or record
           underflow if ST(0) is empty). */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL_CONST(uint16_t, u16Fsw, /*=*/ 0);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY(0)
            IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
        IEM_MC_ELSE()
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(0);
        IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* General case: store ST(0) into ST(i), then pop. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
        IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
            IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
            IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, iDstReg);
        IEM_MC_ELSE()
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(iDstReg);
        IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
7464
7465
/**
 * Common worker for FPU instructions working on ST0 and replaces it with the
 * result, i.e. unary operators.
 *
 * Raises \#NM/\#MF as appropriate; on an empty ST(0) it records a stack
 * underflow instead of calling the assembly worker.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpu_st0, PFNIEMAIMPLFPUR80UNARY, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(2, 1);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuRes, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7495
7496
/** Opcode 0xd9 0xe0. - FCHS: negate the sign of ST(0). */
FNIEMOP_DEF(iemOp_fchs)
{
    IEMOP_MNEMONIC(fchs_st0, "fchs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fchs_r80);
}


/** Opcode 0xd9 0xe1. - FABS: clear the sign of ST(0). */
FNIEMOP_DEF(iemOp_fabs)
{
    IEMOP_MNEMONIC(fabs_st0, "fabs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fabs_r80);
}
7511
7512
/**
 * Common worker for FPU instructions working on ST0 and only returns FSW.
 *
 * Used by compare/examine style instructions (FTST, FXAM) that update the
 * status word condition codes but do not modify any data register.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0, PFNIEMAIMPLFPUR80UNARYFSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(2, 1);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pu16Fsw, pr80Value);
        IEM_MC_UPDATE_FSW(u16Fsw);
    IEM_MC_ELSE()
        /* UINT8_MAX: no destination register to mark, only FSW is affected. */
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7541
7542
/** Opcode 0xd9 0xe4. - FTST: compare ST(0) against +0.0, set C0/C2/C3. */
FNIEMOP_DEF(iemOp_ftst)
{
    IEMOP_MNEMONIC(ftst_st0, "ftst st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_ftst_r80);
}


/** Opcode 0xd9 0xe5. - FXAM: classify the value in ST(0) via C0/C1/C2/C3. */
FNIEMOP_DEF(iemOp_fxam)
{
    IEMOP_MNEMONIC(fxam_st0, "fxam st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_fxam_r80);
}
7557
7558
/**
 * Common worker for FPU instructions pushing a constant onto the FPU stack.
 *
 * Checks that the register which will become the new top (ST(7) relative to
 * the current TOP) is free; otherwise records a stack push overflow.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuPushConstant, PFNIEMAIMPLFPUR80LDCONST, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(1, 1);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    /* Register 7 (relative to TOP) is where the push will land. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_1(pfnAImpl, pFpuRes);
        IEM_MC_PUSH_FPU_RESULT(FpuRes);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW();
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7586
7587
/** Opcode 0xd9 0xe8. - FLD1: push +1.0. */
FNIEMOP_DEF(iemOp_fld1)
{
    IEMOP_MNEMONIC(fld1, "fld1");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fld1);
}


/** Opcode 0xd9 0xe9. - FLDL2T: push log2(10). */
FNIEMOP_DEF(iemOp_fldl2t)
{
    IEMOP_MNEMONIC(fldl2t, "fldl2t");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2t);
}


/** Opcode 0xd9 0xea. - FLDL2E: push log2(e). */
FNIEMOP_DEF(iemOp_fldl2e)
{
    IEMOP_MNEMONIC(fldl2e, "fldl2e");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2e);
}

/** Opcode 0xd9 0xeb. - FLDPI: push pi. */
FNIEMOP_DEF(iemOp_fldpi)
{
    IEMOP_MNEMONIC(fldpi, "fldpi");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldpi);
}


/** Opcode 0xd9 0xec. - FLDLG2: push log10(2). */
FNIEMOP_DEF(iemOp_fldlg2)
{
    IEMOP_MNEMONIC(fldlg2, "fldlg2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldlg2);
}

/** Opcode 0xd9 0xed. - FLDLN2: push ln(2). */
FNIEMOP_DEF(iemOp_fldln2)
{
    IEMOP_MNEMONIC(fldln2, "fldln2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldln2);
}


/** Opcode 0xd9 0xee. - FLDZ: push +0.0. */
FNIEMOP_DEF(iemOp_fldz)
{
    IEMOP_MNEMONIC(fldz, "fldz");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldz);
}


/** Opcode 0xd9 0xf0. - F2XM1: ST(0) = 2^ST(0) - 1. */
FNIEMOP_DEF(iemOp_f2xm1)
{
    IEMOP_MNEMONIC(f2xm1_st0, "f2xm1 st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_f2xm1_r80);
}
7648
7649
/**
 * Common worker for FPU instructions working on STn and ST0, storing the result
 * in STn, and popping the stack unless IE, DE or ZE was raised.
 *
 * Used by the FYL2X/FPATAN/FYL2XP1 family where ST(1) receives the result
 * and ST(0) is popped.
 *
 * @param   bRm         ModR/M byte; r/m selects STn (the destination).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Operand 1 is STn (destination), operand 2 is ST(0). */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, bRm & X86_MODRM_RM_MASK);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(bRm & X86_MODRM_RM_MASK);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7681
7682
/** Opcode 0xd9 0xf1. - FYL2X: ST(1) = ST(1) * log2(ST(0)), then pop. */
FNIEMOP_DEF(iemOp_fyl2x)
{
    IEMOP_MNEMONIC(fyl2x_st0, "fyl2x st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2x_r80_by_r80);
}
7689
7690
/**
 * Common worker for FPU instructions working on ST0 and having two outputs, one
 * replacing ST0 and one pushed onto the stack.
 *
 * Used by FPTAN, FXTRACT and FSINCOS.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuReplace_st0_push, PFNIEMAIMPLFPUR80UNARYTWO, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(2, 1);
    IEM_MC_LOCAL(IEMFPURESULTTWO, FpuResTwo);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULTTWO, pFpuResTwo, FpuResTwo, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuResTwo, pr80Value);
        IEM_MC_PUSH_FPU_RESULT_TWO(FpuResTwo);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW_TWO();
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7720
7721
/** Opcode 0xd9 0xf2. - FPTAN: ST(0) = tan(ST(0)), then push 1.0. */
FNIEMOP_DEF(iemOp_fptan)
{
    IEMOP_MNEMONIC(fptan_st0, "fptan st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fptan_r80_r80);
}


/** Opcode 0xd9 0xf3. - FPATAN: ST(1) = arctan(ST(1)/ST(0)), then pop. */
FNIEMOP_DEF(iemOp_fpatan)
{
    IEMOP_MNEMONIC(fpatan_st1_st0, "fpatan st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fpatan_r80_by_r80);
}


/** Opcode 0xd9 0xf4. - FXTRACT: split ST(0) into exponent and significand,
 *  leaving the exponent in ST(0) and pushing the significand. */
FNIEMOP_DEF(iemOp_fxtract)
{
    IEMOP_MNEMONIC(fxtract_st0, "fxtract st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fxtract_r80_r80);
}


/** Opcode 0xd9 0xf5. - FPREM1: IEEE partial remainder of ST(0)/ST(1). */
FNIEMOP_DEF(iemOp_fprem1)
{
    IEMOP_MNEMONIC(fprem1_st0_st1, "fprem1 st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem1_r80_by_r80);
}
7752
7753
/** Opcode 0xd9 0xf6. - FDECSTP: decrement the FPU stack TOP pointer (no
 *  register contents or tags are changed). */
FNIEMOP_DEF(iemOp_fdecstp)
{
    IEMOP_MNEMONIC(fdecstp, "fdecstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_DEC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0);

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
7776
7777
/** Opcode 0xd9 0xf7. - FINCSTP: increment the FPU stack TOP pointer (no
 *  register contents or tags are changed). */
FNIEMOP_DEF(iemOp_fincstp)
{
    IEMOP_MNEMONIC(fincstp, "fincstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0);

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
7800
7801
/** Opcode 0xd9 0xf8. - FPREM: partial remainder (truncating) of ST(0)/ST(1). */
FNIEMOP_DEF(iemOp_fprem)
{
    IEMOP_MNEMONIC(fprem_st0_st1, "fprem st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem_r80_by_r80);
}


/** Opcode 0xd9 0xf9. - FYL2XP1: ST(1) = ST(1) * log2(ST(0)+1), then pop. */
FNIEMOP_DEF(iemOp_fyl2xp1)
{
    IEMOP_MNEMONIC(fyl2xp1_st1_st0, "fyl2xp1 st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2xp1_r80_by_r80);
}


/** Opcode 0xd9 0xfa. - FSQRT: ST(0) = sqrt(ST(0)). */
FNIEMOP_DEF(iemOp_fsqrt)
{
    IEMOP_MNEMONIC(fsqrt_st0, "fsqrt st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsqrt_r80);
}


/** Opcode 0xd9 0xfb. - FSINCOS: ST(0) = sin(ST(0)), then push cos result. */
FNIEMOP_DEF(iemOp_fsincos)
{
    IEMOP_MNEMONIC(fsincos_st0, "fsincos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fsincos_r80_r80);
}


/** Opcode 0xd9 0xfc. - FRNDINT: round ST(0) to integer per the RC field. */
FNIEMOP_DEF(iemOp_frndint)
{
    IEMOP_MNEMONIC(frndint_st0, "frndint st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_frndint_r80);
}


/** Opcode 0xd9 0xfd. - FSCALE: ST(0) = ST(0) * 2^trunc(ST(1)). */
FNIEMOP_DEF(iemOp_fscale)
{
    IEMOP_MNEMONIC(fscale_st0_st1, "fscale st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fscale_r80_by_r80);
}


/** Opcode 0xd9 0xfe. - FSIN: ST(0) = sin(ST(0)). */
FNIEMOP_DEF(iemOp_fsin)
{
    IEMOP_MNEMONIC(fsin_st0, "fsin st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsin_r80);
}


/** Opcode 0xd9 0xff. - FCOS: ST(0) = cos(ST(0)). */
FNIEMOP_DEF(iemOp_fcos)
{
    IEMOP_MNEMONIC(fcos_st0, "fcos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fcos_r80);
}
7864
7865
/** Used by iemOp_EscF1.
 *  Dispatch table for 0xd9 with mod=11 and reg=4..7, i.e. the second opcode
 *  bytes 0xe0 thru 0xff; indexed by (bRm - 0xe0). */
IEM_STATIC const PFNIEMOP g_apfnEscF1_E0toFF[32] =
{
    /* 0xe0 */  iemOp_fchs,
    /* 0xe1 */  iemOp_fabs,
    /* 0xe2 */  iemOp_Invalid,
    /* 0xe3 */  iemOp_Invalid,
    /* 0xe4 */  iemOp_ftst,
    /* 0xe5 */  iemOp_fxam,
    /* 0xe6 */  iemOp_Invalid,
    /* 0xe7 */  iemOp_Invalid,
    /* 0xe8 */  iemOp_fld1,
    /* 0xe9 */  iemOp_fldl2t,
    /* 0xea */  iemOp_fldl2e,
    /* 0xeb */  iemOp_fldpi,
    /* 0xec */  iemOp_fldlg2,
    /* 0xed */  iemOp_fldln2,
    /* 0xee */  iemOp_fldz,
    /* 0xef */  iemOp_Invalid,
    /* 0xf0 */  iemOp_f2xm1,
    /* 0xf1 */  iemOp_fyl2x,
    /* 0xf2 */  iemOp_fptan,
    /* 0xf3 */  iemOp_fpatan,
    /* 0xf4 */  iemOp_fxtract,
    /* 0xf5 */  iemOp_fprem1,
    /* 0xf6 */  iemOp_fdecstp,
    /* 0xf7 */  iemOp_fincstp,
    /* 0xf8 */  iemOp_fprem,
    /* 0xf9 */  iemOp_fyl2xp1,
    /* 0xfa */  iemOp_fsqrt,
    /* 0xfb */  iemOp_fsincos,
    /* 0xfc */  iemOp_frndint,
    /* 0xfd */  iemOp_fscale,
    /* 0xfe */  iemOp_fsin,
    /* 0xff */  iemOp_fcos
};
7902
7903
/**
 * @opcode 0xd9
 *
 * x87 escape byte 0xd9.  Register forms (mod=11) dispatch on reg and, for
 * reg=4..7, on the full second byte via g_apfnEscF1_E0toFF; memory forms
 * dispatch on reg only.
 */
FNIEMOP_DEF(iemOp_EscF1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FOP value (last 3 opcode bits + ModR/M) for FSTENV & co. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd9 & 0x7);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm);
            case 2:
                /* Only 0xd0 (FNOP) is defined in this row. */
                if (bRm == 0xd0)
                    return FNIEMOP_CALL(iemOp_fnop);
                return IEMOP_RAISE_INVALID_OPCODE();
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved. Intel behavior seems to be FSTP ST(i) though. */
            case 4:
            case 5:
            case 6:
            case 7:
                Assert((unsigned)bRm - 0xe0U < RT_ELEMENTS(g_apfnEscF1_E0toFF));
                return FNIEMOP_CALL(g_apfnEscF1_E0toFF[bRm - 0xe0]);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m32r,  bRm);
            case 1: return IEMOP_RAISE_INVALID_OPCODE();
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m32r,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fldenv,    bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fldcw,     bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fnstenv,   bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstcw,    bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
7948
7949
/** Opcode 0xda 11/0. - FCMOVB: copy ST(i) to ST(0) if CF is set. */
FNIEMOP_DEF_1(iemOp_fcmovb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovb_st0_stN, "fcmovb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(i) and ST(0) must be non-empty, even when the move is not taken. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7976
7977
/** Opcode 0xda 11/1. - FCMOVE: copy ST(i) to ST(0) if ZF is set. */
FNIEMOP_DEF_1(iemOp_fcmove_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmove_st0_stN, "fcmove st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(i) and ST(0) must be non-empty, even when the move is not taken. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8004
8005
/** Opcode 0xda 11/2. - FCMOVBE: copy ST(i) to ST(0) if CF or ZF is set. */
FNIEMOP_DEF_1(iemOp_fcmovbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovbe_st0_stN, "fcmovbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(i) and ST(0) must be non-empty, even when the move is not taken. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8032
8033
/** Opcode 0xda 11/3. - FCMOVU: copy ST(i) to ST(0) if PF is set (unordered). */
FNIEMOP_DEF_1(iemOp_fcmovu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovu_st0_stN, "fcmovu st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(i) and ST(0) must be non-empty, even when the move is not taken. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8060
8061
/**
 * Common worker for FPU instructions working on ST0 and STn, only affecting
 * flags, and popping twice when done.
 *
 * Used by the compare-and-pop-twice forms (FUCOMPP/FCOMPP style).
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Compares ST(0) against ST(1); the encoding has no register operand here. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, 1)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW_THEN_POP_POP(u16Fsw);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP_POP();
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8093
8094
/** Opcode 0xda 0xe9. - FUCOMPP: unordered compare ST(0) with ST(1), pop twice. */
FNIEMOP_DEF(iemOp_fucompp)
{
    IEMOP_MNEMONIC(fucompp_st0_stN, "fucompp st0,stN");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fucom_r80_by_r80);
}
8101
8102
/**
 * Common worker for FPU instructions working on ST0 and an m32i, and storing
 * the result in ST0.
 *
 * Decodes the memory operand, fetches the 32-bit signed integer and invokes
 * the assembly worker with ST(0) as the first operand.
 *
 * @param   bRm         ModR/M byte (memory form).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32i, uint8_t, bRm, PFNIEMAIMPLFPUI32, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Fetch the integer operand before touching the FPU state so memory
       faults are raised first. */
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi32Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8138
8139
/** Opcode 0xda !11/0. - FIADD m32int: ST(0) += m32i. */
FNIEMOP_DEF_1(iemOp_fiadd_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m32i, "fiadd m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fiadd_r80_by_i32);
}


/** Opcode 0xda !11/1. - FIMUL m32int: ST(0) *= m32i. */
FNIEMOP_DEF_1(iemOp_fimul_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m32i, "fimul m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fimul_r80_by_i32);
}
8154
8155
/** Opcode 0xda !11/2. - FICOM m32int: compare ST(0) with m32i (flags only). */
FNIEMOP_DEF_1(iemOp_ficom_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m32i, "ficom st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        /* Only FSW is updated; no data register is written. */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8188
8189
/** Opcode 0xda !11/3. - FICOMP m32int: compare ST(0) with m32i and pop. */
FNIEMOP_DEF_1(iemOp_ficomp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m32i, "ficomp st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        /* Same as FICOM but with the popping FSW-update / underflow helpers. */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8222
8223
/** Opcode 0xda !11/4. - FISUB m32int: ST(0) -= m32i. */
FNIEMOP_DEF_1(iemOp_fisub_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m32i, "fisub m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisub_r80_by_i32);
}


/** Opcode 0xda !11/5. - FISUBR m32int: ST(0) = m32i - ST(0). */
FNIEMOP_DEF_1(iemOp_fisubr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m32i, "fisubr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisubr_r80_by_i32);
}


/** Opcode 0xda !11/6. - FIDIV m32int: ST(0) /= m32i. */
FNIEMOP_DEF_1(iemOp_fidiv_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m32i, "fidiv m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidiv_r80_by_i32);
}


/** Opcode 0xda !11/7. - FIDIVR m32int: ST(0) = m32i / ST(0). */
FNIEMOP_DEF_1(iemOp_fidivr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m32i, "fidivr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidivr_r80_by_i32);
}
8254
8255
/**
 * @opcode 0xda
 *
 * x87 escape byte 0xda.  Register forms are the FCMOVcc family plus FUCOMPP
 * (0xe9); memory forms are the m32int arithmetic/compare instructions.
 */
FNIEMOP_DEF(iemOp_EscF2)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FOP value (last 3 opcode bits + ModR/M) for FSTENV & co. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xda & 0x7);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovb_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmove_stN,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovu_stN,  bRm);
            case 4: return IEMOP_RAISE_INVALID_OPCODE();
            case 5:
                /* Only 0xe9 (FUCOMPP) is defined in this row. */
                if (bRm == 0xe9)
                    return FNIEMOP_CALL(iemOp_fucompp);
                return IEMOP_RAISE_INVALID_OPCODE();
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m32i,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m32i,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m32i,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m32i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m32i,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m32i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m32i,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m32i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8297
8298
/** Opcode 0xdb !11/0. - FILD m32int: convert m32i to 80-bit real and push. */
FNIEMOP_DEF_1(iemOp_fild_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m32i, "fild m32i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int32_t, i32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val, i32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Register 7 (relative to TOP) is where the push will land. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i32_to_r80, pFpuRes, pi32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8330
8331
/** Opcode 0xdb !11/1.
 * FISTTP m32i (SSE3) - stores ST(0) to memory as a 32-bit signed integer with
 * truncation (chop rounding regardless of FCW.RC), then pops the stack. */
FNIEMOP_DEF_1(iemOp_fisttp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m32i, "fisttp m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination first so memory faults are raised before FPU work. */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        /* The commit honours the FSW returned by the helper (unmasked #P etc). */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* ST(0) empty: masked stack underflow stores the integer indefinite. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8366
8367
/** Opcode 0xdb !11/2.
 * FIST m32i - stores ST(0) to memory as a 32-bit signed integer, rounding
 * according to FCW.RC; the stack is left unchanged (no pop). */
FNIEMOP_DEF_1(iemOp_fist_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m32i, "fist m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination first so memory faults are raised before FPU work. */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* ST(0) empty: masked stack underflow stores the integer indefinite. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8402
8403
/** Opcode 0xdb !11/3.
 * FISTP m32i - like FIST m32i (rounds per FCW.RC) but pops ST(0) afterwards. */
FNIEMOP_DEF_1(iemOp_fistp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m32i, "fistp m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination first so memory faults are raised before FPU work. */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* ST(0) empty: masked stack underflow stores the integer indefinite. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8438
8439
/** Opcode 0xdb !11/5.
 * FLD m80r - pushes an 80-bit real from memory onto the FPU stack without
 * conversion (the helper just copies the value and produces the FSW). */
FNIEMOP_DEF_1(iemOp_fld_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m80r, "fld m80r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT80U, r80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT80U, pr80Val, r80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R80(r80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* ST(7) must be free or the push overflows the register stack. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r80, pFpuRes, pr80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8471
8472
/** Opcode 0xdb !11/7.
 * FSTP m80r - stores ST(0) to memory as an 80-bit real (no conversion) and
 * pops the register stack. */
FNIEMOP_DEF_1(iemOp_fstp_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m80r, "fstp m80r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT80U, pr80Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination first so memory faults are raised before FPU work. */
    IEM_MC_MEM_MAP(pr80Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r80, pu16Fsw, pr80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr80Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* ST(0) empty: masked stack underflow stores the QNaN indefinite. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(pr80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr80Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8507
8508
/** Opcode 0xdb 11/0.
 * FCMOVNB ST(0),ST(i) - copies ST(i) into ST(0) if CF is clear (not below).
 * Raises stack underflow if either register is empty. */
FNIEMOP_DEF_1(iemOp_fcmovnb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnb_st0_stN, "fcmovnb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(i) and ST(0) must be non-empty; pr80ValueN refs ST(i). */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        /* FOP/FIP are updated even when the condition is false. */
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8535
8536
/** Opcode 0xdb 11/1.
 * FCMOVNE ST(0),ST(i) - copies ST(i) into ST(0) if ZF is clear (not equal).
 * Raises stack underflow if either register is empty. */
FNIEMOP_DEF_1(iemOp_fcmovne_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovne_st0_stN, "fcmovne st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(i) and ST(0) must be non-empty; pr80ValueN refs ST(i). */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        /* FOP/FIP are updated even when the condition is false. */
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8563
8564
/** Opcode 0xdb 11/2.
 * FCMOVNBE ST(0),ST(i) - copies ST(i) into ST(0) if both CF and ZF are clear
 * (not below or equal). Raises stack underflow if either register is empty. */
FNIEMOP_DEF_1(iemOp_fcmovnbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnbe_st0_stN, "fcmovnbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(i) and ST(0) must be non-empty; pr80ValueN refs ST(i). */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        /* FOP/FIP are updated even when the condition is false. */
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8591
8592
/** Opcode 0xdb 11/3.
 * FCMOVNU ST(0),ST(i) - copies ST(i) into ST(0) if PF is clear (not
 * unordered). Raises stack underflow if either register is empty.
 * NOTE(review): the double 'n' in 'fcmovnnu' looks like a typo for 'fcmovnu';
 * verify before renaming since the name feeds IEMOP_MNEMONIC statistics. */
FNIEMOP_DEF_1(iemOp_fcmovnnu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnnu_st0_stN, "fcmovnnu st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(i) and ST(0) must be non-empty; pr80ValueN refs ST(i). */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        /* FOP/FIP are updated even when the condition is false. */
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8619
8620
/** Opcode 0xdb 0xe0.
 * FNENI - 8087 interrupt-enable instruction; a no-op on later CPUs (still
 * subject to the device-not-available check). */
FNIEMOP_DEF(iemOp_fneni)
{
    IEMOP_MNEMONIC(fneni, "fneni (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8632
8633
/** Opcode 0xdb 0xe1.
 * FNDISI - 8087 interrupt-disable instruction; a no-op on later CPUs (still
 * subject to the device-not-available check). */
FNIEMOP_DEF(iemOp_fndisi)
{
    IEMOP_MNEMONIC(fndisi, "fndisi (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8645
8646
/** Opcode 0xdb 0xe2.
 * FNCLEX - clears the FSW exception flags without first checking for pending
 * unmasked exceptions (no-wait form). */
FNIEMOP_DEF(iemOp_fnclex)
{
    IEMOP_MNEMONIC(fnclex, "fnclex");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_CLEAR_FSW_EX();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8661
8662
/** Opcode 0xdb 0xe3.
 * FNINIT - reinitializes the FPU; the no-wait form, so pending exceptions are
 * not checked first (hence fCheckXcpts=false to the common worker). */
FNIEMOP_DEF(iemOp_fninit)
{
    IEMOP_MNEMONIC(fninit, "fninit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_finit, false /*fCheckXcpts*/);
}
8670
8671
/** Opcode 0xdb 0xe4.
 * FNSETPM - 80287 set-protected-mode instruction; a no-op on later CPUs
 * (still subject to the device-not-available check). */
FNIEMOP_DEF(iemOp_fnsetpm)
{
    IEMOP_MNEMONIC(fnsetpm, "fnsetpm (80287/ign)");   /* set protected mode on fpu. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8683
8684
/** Opcode 0xdb 0xe5.
 * FRSTPM - 80287XL reset-protected-mode instruction; #UDs on newer CPUs, so
 * the no-op emulation is compiled out and we raise invalid opcode instead. */
FNIEMOP_DEF(iemOp_frstpm)
{
    IEMOP_MNEMONIC(frstpm, "frstpm (80287XL/ign)"); /* reset pm, back to real mode. */
#if 0 /* #UDs on newer CPUs */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
#else
    return IEMOP_RAISE_INVALID_OPCODE();
#endif
}
8700
8701
/** Opcode 0xdb 11/5.
 * FUCOMI ST(0),ST(i) - unordered compare setting ZF/PF/CF; no pop. */
FNIEMOP_DEF_1(iemOp_fucomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomi_st0_stN, "fucomi st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fucomi_r80_by_r80, false /*fPop*/);
}
8708
8709
/** Opcode 0xdb 11/6.
 * FCOMI ST(0),ST(i) - ordered compare setting ZF/PF/CF; no pop. */
FNIEMOP_DEF_1(iemOp_fcomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomi_st0_stN, "fcomi st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, false /*fPop*/);
}
8716
8717
8718/**
8719 * @opcode 0xdb
8720 */
8721FNIEMOP_DEF(iemOp_EscF3)
8722{
8723 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8724 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdb & 0x7);
8725 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8726 {
8727 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
8728 {
8729 case 0: return FNIEMOP_CALL_1(iemOp_fcmovnb_stN, bRm);
8730 case 1: return FNIEMOP_CALL_1(iemOp_fcmovne_stN, bRm);
8731 case 2: return FNIEMOP_CALL_1(iemOp_fcmovnbe_stN, bRm);
8732 case 3: return FNIEMOP_CALL_1(iemOp_fcmovnnu_stN, bRm);
8733 case 4:
8734 switch (bRm)
8735 {
8736 case 0xe0: return FNIEMOP_CALL(iemOp_fneni);
8737 case 0xe1: return FNIEMOP_CALL(iemOp_fndisi);
8738 case 0xe2: return FNIEMOP_CALL(iemOp_fnclex);
8739 case 0xe3: return FNIEMOP_CALL(iemOp_fninit);
8740 case 0xe4: return FNIEMOP_CALL(iemOp_fnsetpm);
8741 case 0xe5: return FNIEMOP_CALL(iemOp_frstpm);
8742 case 0xe6: return IEMOP_RAISE_INVALID_OPCODE();
8743 case 0xe7: return IEMOP_RAISE_INVALID_OPCODE();
8744 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8745 }
8746 break;
8747 case 5: return FNIEMOP_CALL_1(iemOp_fucomi_stN, bRm);
8748 case 6: return FNIEMOP_CALL_1(iemOp_fcomi_stN, bRm);
8749 case 7: return IEMOP_RAISE_INVALID_OPCODE();
8750 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8751 }
8752 }
8753 else
8754 {
8755 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
8756 {
8757 case 0: return FNIEMOP_CALL_1(iemOp_fild_m32i, bRm);
8758 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m32i,bRm);
8759 case 2: return FNIEMOP_CALL_1(iemOp_fist_m32i, bRm);
8760 case 3: return FNIEMOP_CALL_1(iemOp_fistp_m32i, bRm);
8761 case 4: return IEMOP_RAISE_INVALID_OPCODE();
8762 case 5: return FNIEMOP_CALL_1(iemOp_fld_m80r, bRm);
8763 case 6: return IEMOP_RAISE_INVALID_OPCODE();
8764 case 7: return FNIEMOP_CALL_1(iemOp_fstp_m80r, bRm);
8765 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8766 }
8767 }
8768}
8769
8770
8771/**
8772 * Common worker for FPU instructions working on STn and ST0, and storing the
8773 * result in STn unless IE, DE or ZE was raised.
8774 *
8775 * @param pfnAImpl Pointer to the instruction implementation (assembly).
8776 */
8777FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
8778{
8779 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8780
8781 IEM_MC_BEGIN(3, 1);
8782 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
8783 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
8784 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
8785 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
8786
8787 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8788 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8789
8790 IEM_MC_PREPARE_FPU_USAGE();
8791 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
8792 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
8793 IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
8794 IEM_MC_ELSE()
8795 IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
8796 IEM_MC_ENDIF();
8797 IEM_MC_ADVANCE_RIP();
8798
8799 IEM_MC_END();
8800 return VINF_SUCCESS;
8801}
8802
8803
/** Opcode 0xdc 11/0.
 * FADD ST(i),ST(0) - adds ST(0) to ST(i), storing the result in ST(i). */
FNIEMOP_DEF_1(iemOp_fadd_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_stN_st0, "fadd stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fadd_r80_by_r80);
}
8810
8811
/** Opcode 0xdc 11/1.
 * FMUL ST(i),ST(0) - multiplies ST(i) by ST(0), storing the result in ST(i). */
FNIEMOP_DEF_1(iemOp_fmul_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_stN_st0, "fmul stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fmul_r80_by_r80);
}
8818
8819
/** Opcode 0xdc 11/4.
 * FSUBR ST(i),ST(0) - stores ST(0) - ST(i) in ST(i). */
FNIEMOP_DEF_1(iemOp_fsubr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_stN_st0, "fsubr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsubr_r80_by_r80);
}
8826
8827
/** Opcode 0xdc 11/5.
 * FSUB ST(i),ST(0) - stores ST(i) - ST(0) in ST(i). */
FNIEMOP_DEF_1(iemOp_fsub_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_stN_st0, "fsub stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsub_r80_by_r80);
}
8834
8835
/** Opcode 0xdc 11/6.
 * FDIVR ST(i),ST(0) - stores ST(0) / ST(i) in ST(i). */
FNIEMOP_DEF_1(iemOp_fdivr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_stN_st0, "fdivr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdivr_r80_by_r80);
}
8842
8843
/** Opcode 0xdc 11/7.
 * FDIV ST(i),ST(0) - stores ST(i) / ST(0) in ST(i). */
FNIEMOP_DEF_1(iemOp_fdiv_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_stN_st0, "fdiv stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdiv_r80_by_r80);
}
8850
8851
8852/**
8853 * Common worker for FPU instructions working on ST0 and a 64-bit floating point
8854 * memory operand, and storing the result in ST0.
8855 *
8856 * @param pfnAImpl Pointer to the instruction implementation (assembly).
8857 */
8858FNIEMOP_DEF_2(iemOpHlpFpu_ST0_m64r, uint8_t, bRm, PFNIEMAIMPLFPUR64, pfnImpl)
8859{
8860 IEM_MC_BEGIN(3, 3);
8861 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8862 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
8863 IEM_MC_LOCAL(RTFLOAT64U, r64Factor2);
8864 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
8865 IEM_MC_ARG(PCRTFLOAT80U, pr80Factor1, 1);
8866 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Factor2, r64Factor2, 2);
8867
8868 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8869 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8870 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8871 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8872
8873 IEM_MC_FETCH_MEM_R64(r64Factor2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
8874 IEM_MC_PREPARE_FPU_USAGE();
8875 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Factor1, 0)
8876 IEM_MC_CALL_FPU_AIMPL_3(pfnImpl, pFpuRes, pr80Factor1, pr64Factor2);
8877 IEM_MC_STORE_FPU_RESULT_MEM_OP(FpuRes, 0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
8878 IEM_MC_ELSE()
8879 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
8880 IEM_MC_ENDIF();
8881 IEM_MC_ADVANCE_RIP();
8882
8883 IEM_MC_END();
8884 return VINF_SUCCESS;
8885}
8886
8887
/** Opcode 0xdc !11/0.
 * FADD m64r - adds the 64-bit real memory operand to ST(0). */
FNIEMOP_DEF_1(iemOp_fadd_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_m64r, "fadd m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fadd_r80_by_r64);
}
8894
8895
/** Opcode 0xdc !11/1.
 * FMUL m64r - multiplies ST(0) by the 64-bit real memory operand. */
FNIEMOP_DEF_1(iemOp_fmul_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_m64r, "fmul m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fmul_r80_by_r64);
}
8902
8903
/** Opcode 0xdc !11/2.
 * FCOM m64r - compares ST(0) with the 64-bit real memory operand, updating
 * the FSW condition codes only (no register is written, no pop). */
FNIEMOP_DEF_1(iemOp_fcom_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m64r, "fcom st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8936
8937
/** Opcode 0xdc !11/3.
 * FCOMP m64r - like FCOM m64r but pops ST(0) afterwards (same assembly
 * worker; the pop happens in the FSW-update macro). */
FNIEMOP_DEF_1(iemOp_fcomp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m64r, "fcomp st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8970
8971
/** Opcode 0xdc !11/4.
 * FSUB m64r - stores ST(0) - m64r in ST(0). */
FNIEMOP_DEF_1(iemOp_fsub_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_m64r, "fsub m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsub_r80_by_r64);
}
8978
8979
/** Opcode 0xdc !11/5.
 * FSUBR m64r - stores m64r - ST(0) in ST(0). */
FNIEMOP_DEF_1(iemOp_fsubr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_m64r, "fsubr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsubr_r80_by_r64);
}
8986
8987
/** Opcode 0xdc !11/6.
 * FDIV m64r - stores ST(0) / m64r in ST(0). */
FNIEMOP_DEF_1(iemOp_fdiv_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_m64r, "fdiv m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdiv_r80_by_r64);
}
8994
8995
/** Opcode 0xdc !11/7.
 * FDIVR m64r - stores m64r / ST(0) in ST(0). */
FNIEMOP_DEF_1(iemOp_fdivr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_m64r, "fdivr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdivr_r80_by_r64);
}
9002
9003
9004/**
9005 * @opcode 0xdc
9006 */
9007FNIEMOP_DEF(iemOp_EscF4)
9008{
9009 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9010 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdc & 0x7);
9011 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9012 {
9013 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
9014 {
9015 case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN_st0, bRm);
9016 case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN_st0, bRm);
9017 case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm); /* Marked reserved, intel behavior is that of FCOM ST(i). */
9018 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm); /* Marked reserved, intel behavior is that of FCOMP ST(i). */
9019 case 4: return FNIEMOP_CALL_1(iemOp_fsubr_stN_st0, bRm);
9020 case 5: return FNIEMOP_CALL_1(iemOp_fsub_stN_st0, bRm);
9021 case 6: return FNIEMOP_CALL_1(iemOp_fdivr_stN_st0, bRm);
9022 case 7: return FNIEMOP_CALL_1(iemOp_fdiv_stN_st0, bRm);
9023 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9024 }
9025 }
9026 else
9027 {
9028 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
9029 {
9030 case 0: return FNIEMOP_CALL_1(iemOp_fadd_m64r, bRm);
9031 case 1: return FNIEMOP_CALL_1(iemOp_fmul_m64r, bRm);
9032 case 2: return FNIEMOP_CALL_1(iemOp_fcom_m64r, bRm);
9033 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m64r, bRm);
9034 case 4: return FNIEMOP_CALL_1(iemOp_fsub_m64r, bRm);
9035 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m64r, bRm);
9036 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m64r, bRm);
9037 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m64r, bRm);
9038 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9039 }
9040 }
9041}
9042
9043
/** Opcode 0xdd !11/0.
 * FLD m64r - converts the 64-bit real memory operand to an 80-bit real and
 * pushes it onto the FPU stack (the conversion is exact).
 * @sa iemOp_fld_m32r */
FNIEMOP_DEF_1(iemOp_fld_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m64r, "fld m64r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val, r64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    /* ST(7) must be free or the push overflows the register stack. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r64_to_r80, pFpuRes, pr64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9075
9076
/** Opcode 0xdd !11/1 (header previously said !11/0; FISTTP m64i is DD /1).
 * FISTTP m64i (SSE3) - stores ST(0) to memory as a 64-bit signed integer with
 * truncation (chop rounding regardless of FCW.RC), then pops the stack. */
FNIEMOP_DEF_1(iemOp_fisttp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m64i, "fisttp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination first so memory faults are raised before FPU work. */
    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* ST(0) empty: masked stack underflow stores the integer indefinite. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9111
9112
/** Opcode 0xdd !11/2 (header previously said !11/0; FST m64r is DD /2).
 * FST m64r - stores ST(0) to memory as a 64-bit real, rounding per FCW;
 * the register stack is left unchanged (no pop). */
FNIEMOP_DEF_1(iemOp_fst_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m64r, "fst m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination first so memory faults are raised before FPU work. */
    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* ST(0) empty: masked stack underflow stores the QNaN indefinite. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9147
9148
9149
9150
/** Opcode 0xdd !11/3 (header previously said !11/0; FSTP m64r is DD /3).
 * FSTP m64r - like FST m64r but pops ST(0) afterwards. */
FNIEMOP_DEF_1(iemOp_fstp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m64r, "fstp m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination first so memory faults are raised before FPU work. */
    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* ST(0) empty: masked stack underflow stores the QNaN indefinite. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9185
9186
/** Opcode 0xdd !11/4 (header previously said !11/0; FRSTOR is DD /4).
 * FRSTOR m94/108byte - restores the full FPU state from memory; the layout
 * (94 vs 108 bytes) depends on the effective operand size, handled by the
 * deferred C implementation. */
FNIEMOP_DEF_1(iemOp_frstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC(frstor, "frstor m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_frstor, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
9204
9205
/** Opcode 0xdd !11/6 (header previously said !11/0; FNSAVE is DD /6).
 * FNSAVE m94/108byte - saves the full FPU state to memory (no-wait form).
 * NOTE(review): FNSAVE also reinitializes the FPU after saving; here only
 * FOR_READ actualization is requested, so presumably the CIMPL worker takes
 * care of the state change itself - confirm against iemCImpl_fnsave. */
FNIEMOP_DEF_1(iemOp_fnsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnsave, "fnsave m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fnsave, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;

}
9224
/** Opcode 0xdd !11/7 (header previously said !11/0; FNSTSW m16 is DD /7).
 * FNSTSW m16 - stores the FPU status word to memory. No-wait form: note that
 * no pending-FPU-exception check is performed, only #NM. */
FNIEMOP_DEF_1(iemOp_fnstsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstsw_m16, "fnstsw m16");

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
    IEM_MC_ADVANCE_RIP();

/** @todo Debug / drop a hint to the verifier that things may differ
 * from REM. Seen 0x4020 (iem) vs 0x4000 (rem) at 0008:801c6b88 booting
 * NT4SP1. (X86_FSW_PE) */
    IEM_MC_END();
    return VINF_SUCCESS;
}
9249
9250
/** Opcode 0xdd 11/0.
 * Marks ST(i) as empty without changing the top-of-stack pointer. */
FNIEMOP_DEF_1(iemOp_ffree_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffree_stN, "ffree stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C1, C2 and C3 are documented as undefined, we leave them
       unmodified. */

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9272
9273
/** Opcode 0xdd 11/2.
 * Copies ST(0) into ST(i); signals stack underflow if ST(0) is empty. */
FNIEMOP_DEF_1(iemOp_fst_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_st0_stN, "fst st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
    IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9298
9299
/** Opcode 0xdd 11/4.
 * Unordered compare of ST(0) with ST(i); no value is stored. */
FNIEMOP_DEF_1(iemOp_fucom_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucom_st0_stN, "fucom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fucom_r80_by_r80);
}
9306
9307
/** Opcode 0xdd 11/5.
 * Unordered compare of ST(0) with ST(i), then pop the register stack. */
FNIEMOP_DEF_1(iemOp_fucomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomp_st0_stN, "fucomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fucom_r80_by_r80);
}
9314
9315
9316/**
9317 * @opcode 0xdd
9318 */
9319FNIEMOP_DEF(iemOp_EscF5)
9320{
9321 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9322 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdd & 0x7);
9323 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9324 {
9325 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
9326 {
9327 case 0: return FNIEMOP_CALL_1(iemOp_ffree_stN, bRm);
9328 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, intel behavior is that of XCHG ST(i). */
9329 case 2: return FNIEMOP_CALL_1(iemOp_fst_stN, bRm);
9330 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm);
9331 case 4: return FNIEMOP_CALL_1(iemOp_fucom_stN_st0,bRm);
9332 case 5: return FNIEMOP_CALL_1(iemOp_fucomp_stN, bRm);
9333 case 6: return IEMOP_RAISE_INVALID_OPCODE();
9334 case 7: return IEMOP_RAISE_INVALID_OPCODE();
9335 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9336 }
9337 }
9338 else
9339 {
9340 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
9341 {
9342 case 0: return FNIEMOP_CALL_1(iemOp_fld_m64r, bRm);
9343 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m64i, bRm);
9344 case 2: return FNIEMOP_CALL_1(iemOp_fst_m64r, bRm);
9345 case 3: return FNIEMOP_CALL_1(iemOp_fstp_m64r, bRm);
9346 case 4: return FNIEMOP_CALL_1(iemOp_frstor, bRm);
9347 case 5: return IEMOP_RAISE_INVALID_OPCODE();
9348 case 6: return FNIEMOP_CALL_1(iemOp_fnsave, bRm);
9349 case 7: return FNIEMOP_CALL_1(iemOp_fnstsw, bRm);
9350 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9351 }
9352 }
9353}
9354
9355
/** Opcode 0xde 11/0.
 * ST(i) = ST(i) + ST(0), then pop the register stack. */
FNIEMOP_DEF_1(iemOp_faddp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(faddp_stN_st0, "faddp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fadd_r80_by_r80);
}
9362
9363
/** Opcode 0xde 11/1.
 * ST(i) = ST(i) * ST(0), then pop the register stack. */
FNIEMOP_DEF_1(iemOp_fmulp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmulp_stN_st0, "fmulp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fmul_r80_by_r80);
}
9370
9371
/** Opcode 0xde 0xd9.
 * Compares ST(0) with ST(1), then pops the register stack twice. */
FNIEMOP_DEF(iemOp_fcompp)
{
    IEMOP_MNEMONIC(fcompp_st0_stN, "fcompp st0,stN");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fcom_r80_by_r80);
}
9378
9379
/** Opcode 0xde 11/4.
 * ST(i) = ST(0) - ST(i) (reverse subtract), then pop the register stack. */
FNIEMOP_DEF_1(iemOp_fsubrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubrp_stN_st0, "fsubrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsubr_r80_by_r80);
}
9386
9387
/** Opcode 0xde 11/5.
 * ST(i) = ST(i) - ST(0), then pop the register stack. */
FNIEMOP_DEF_1(iemOp_fsubp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubp_stN_st0, "fsubp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsub_r80_by_r80);
}
9394
9395
/** Opcode 0xde 11/6.
 * ST(i) = ST(0) / ST(i) (reverse divide), then pop the register stack. */
FNIEMOP_DEF_1(iemOp_fdivrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivrp_stN_st0, "fdivrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdivr_r80_by_r80);
}
9402
9403
/** Opcode 0xde 11/7.
 * ST(i) = ST(i) / ST(0), then pop the register stack. */
FNIEMOP_DEF_1(iemOp_fdivp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivp_stN_st0, "fdivp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdiv_r80_by_r80);
}
9410
9411
9412/**
9413 * Common worker for FPU instructions working on ST0 and an m16i, and storing
9414 * the result in ST0.
9415 *
9416 * @param pfnAImpl Pointer to the instruction implementation (assembly).
9417 */
9418FNIEMOP_DEF_2(iemOpHlpFpu_st0_m16i, uint8_t, bRm, PFNIEMAIMPLFPUI16, pfnAImpl)
9419{
9420 IEM_MC_BEGIN(3, 3);
9421 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
9422 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
9423 IEM_MC_LOCAL(int16_t, i16Val2);
9424 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
9425 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
9426 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);
9427
9428 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9429 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9430
9431 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9432 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9433 IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9434
9435 IEM_MC_PREPARE_FPU_USAGE();
9436 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
9437 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi16Val2);
9438 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
9439 IEM_MC_ELSE()
9440 IEM_MC_FPU_STACK_UNDERFLOW(0);
9441 IEM_MC_ENDIF();
9442 IEM_MC_ADVANCE_RIP();
9443
9444 IEM_MC_END();
9445 return VINF_SUCCESS;
9446}
9447
9448
/** Opcode 0xde !11/0.
 * ST(0) = ST(0) + m16int. */
FNIEMOP_DEF_1(iemOp_fiadd_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m16i, "fiadd m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fiadd_r80_by_i16);
}
9455
9456
/** Opcode 0xde !11/1.
 * ST(0) = ST(0) * m16int. */
FNIEMOP_DEF_1(iemOp_fimul_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m16i, "fimul m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fimul_r80_by_i16);
}
9463
9464
/** Opcode 0xde !11/2.
 * Compares ST(0) with a 16-bit memory integer; only FSW is updated. */
FNIEMOP_DEF_1(iemOp_ficom_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m16i, "ficom st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9497
9498
/** Opcode 0xde !11/3.
 * Compares ST(0) with a 16-bit memory integer, then pops the stack. */
FNIEMOP_DEF_1(iemOp_ficomp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m16i, "ficomp st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9531
9532
/** Opcode 0xde !11/4.
 * ST(0) = ST(0) - m16int. */
FNIEMOP_DEF_1(iemOp_fisub_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m16i, "fisub m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisub_r80_by_i16);
}
9539
9540
/** Opcode 0xde !11/5.
 * ST(0) = m16int - ST(0) (reverse subtract). */
FNIEMOP_DEF_1(iemOp_fisubr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m16i, "fisubr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisubr_r80_by_i16);
}
9547
9548
/** Opcode 0xde !11/6.
 * ST(0) = ST(0) / m16int. */
FNIEMOP_DEF_1(iemOp_fidiv_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m16i, "fidiv m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidiv_r80_by_i16);
}
9555
9556
/** Opcode 0xde !11/7.
 * ST(0) = m16int / ST(0) (reverse divide). */
FNIEMOP_DEF_1(iemOp_fidivr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m16i, "fidivr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidivr_r80_by_i16);
}
9563
9564
9565/**
9566 * @opcode 0xde
9567 */
9568FNIEMOP_DEF(iemOp_EscF6)
9569{
9570 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9571 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xde & 0x7);
9572 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9573 {
9574 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
9575 {
9576 case 0: return FNIEMOP_CALL_1(iemOp_faddp_stN_st0, bRm);
9577 case 1: return FNIEMOP_CALL_1(iemOp_fmulp_stN_st0, bRm);
9578 case 2: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
9579 case 3: if (bRm == 0xd9)
9580 return FNIEMOP_CALL(iemOp_fcompp);
9581 return IEMOP_RAISE_INVALID_OPCODE();
9582 case 4: return FNIEMOP_CALL_1(iemOp_fsubrp_stN_st0, bRm);
9583 case 5: return FNIEMOP_CALL_1(iemOp_fsubp_stN_st0, bRm);
9584 case 6: return FNIEMOP_CALL_1(iemOp_fdivrp_stN_st0, bRm);
9585 case 7: return FNIEMOP_CALL_1(iemOp_fdivp_stN_st0, bRm);
9586 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9587 }
9588 }
9589 else
9590 {
9591 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
9592 {
9593 case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m16i, bRm);
9594 case 1: return FNIEMOP_CALL_1(iemOp_fimul_m16i, bRm);
9595 case 2: return FNIEMOP_CALL_1(iemOp_ficom_m16i, bRm);
9596 case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m16i, bRm);
9597 case 4: return FNIEMOP_CALL_1(iemOp_fisub_m16i, bRm);
9598 case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m16i, bRm);
9599 case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m16i, bRm);
9600 case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m16i, bRm);
9601 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9602 }
9603 }
9604}
9605
9606
/** Opcode 0xdf 11/0.
 * Undocumented instruction, assumed to work like ffree + fincstp. */
FNIEMOP_DEF_1(iemOp_ffreep_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffreep_stN, "ffreep stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
    /* The "pop" half: advance TOP like FINCSTP would. */
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9628
9629
/** Opcode 0xdf 0xe0.
 * Stores the FPU status word (FSW) into AX. */
FNIEMOP_DEF(iemOp_fnstsw_ax)
{
    IEMOP_MNEMONIC(fnstsw_ax, "fnstsw ax");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9646
9647
/** Opcode 0xdf 11/5.
 * Unordered compare of ST(0) with ST(i) setting EFLAGS, then pop.
 * NOTE(review): shares iemAImpl_fcomi_r80_by_r80 with FCOMIP below; per the
 * Intel SDM, FUCOMI(P) must not raise \#IA on QNaN operands while FCOMI(P)
 * does -- confirm the common worker handles the unordered case quietly. */
FNIEMOP_DEF_1(iemOp_fucomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomip_st0_stN, "fucomip st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
}
9654
9655
/** Opcode 0xdf 11/6.
 * Compares ST(0) with ST(i) setting EFLAGS, then pops the register stack. */
FNIEMOP_DEF_1(iemOp_fcomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomip_st0_stN, "fcomip st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
}
9662
9663
/** Opcode 0xdf !11/0.
 * Loads a 16-bit memory integer, converts it to 80-bit real and pushes it. */
FNIEMOP_DEF_1(iemOp_fild_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m16i, "fild m16i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int16_t, i16Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val, i16Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Register 7 is the one that becomes ST(0) after the push. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i16_to_r80, pFpuRes, pi16Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9695
9696
/** Opcode 0xdf !11/1.
 * Stores ST(0) to a 16-bit memory integer with truncation (chop) rounding,
 * then pops the register stack (SSE3 FISTTP). */
FNIEMOP_DEF_1(iemOp_fisttp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m16i, "fisttp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Masked invalid-operation: store the integer indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9731
9732
/** Opcode 0xdf !11/2.
 * Stores ST(0) to a 16-bit memory integer using the current FCW rounding. */
FNIEMOP_DEF_1(iemOp_fist_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m16i, "fist m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Masked invalid-operation: store the integer indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9767
9768
/** Opcode 0xdf !11/3.
 * Stores ST(0) to a 16-bit memory integer using the current FCW rounding,
 * then pops the register stack. */
FNIEMOP_DEF_1(iemOp_fistp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m16i, "fistp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Masked invalid-operation: store the integer indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9803
9804
/** Opcode 0xdf !11/4.
 * FBLD: loads an 80-bit packed BCD operand (m80bcd) -- not yet implemented. */
FNIEMOP_STUB_1(iemOp_fbld_m80d, uint8_t, bRm);
9807
9808
/** Opcode 0xdf !11/5.
 * Loads a 64-bit memory integer, converts it to 80-bit real and pushes it. */
FNIEMOP_DEF_1(iemOp_fild_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m64i, "fild m64i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int64_t, i64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int64_t const *, pi64Val, i64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I64(i64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Register 7 is the one that becomes ST(0) after the push. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i64_to_r80, pFpuRes, pi64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9840
9841
/** Opcode 0xdf !11/6.
 * FBSTP: stores ST(0) as 80-bit packed BCD (m80bcd) and pops -- not yet implemented. */
FNIEMOP_STUB_1(iemOp_fbstp_m80d, uint8_t, bRm);
9844
9845
/** Opcode 0xdf !11/7.
 * Stores ST(0) to a 64-bit memory integer using the current FCW rounding,
 * then pops the register stack. */
FNIEMOP_DEF_1(iemOp_fistp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m64i, "fistp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Masked invalid-operation: store the integer indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9880
9881
9882/**
9883 * @opcode 0xdf
9884 */
9885FNIEMOP_DEF(iemOp_EscF7)
9886{
9887 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9888 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9889 {
9890 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
9891 {
9892 case 0: return FNIEMOP_CALL_1(iemOp_ffreep_stN, bRm); /* ffree + pop afterwards, since forever according to AMD. */
9893 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, behaves like FXCH ST(i) on intel. */
9894 case 2: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
9895 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
9896 case 4: if (bRm == 0xe0)
9897 return FNIEMOP_CALL(iemOp_fnstsw_ax);
9898 return IEMOP_RAISE_INVALID_OPCODE();
9899 case 5: return FNIEMOP_CALL_1(iemOp_fucomip_st0_stN, bRm);
9900 case 6: return FNIEMOP_CALL_1(iemOp_fcomip_st0_stN, bRm);
9901 case 7: return IEMOP_RAISE_INVALID_OPCODE();
9902 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9903 }
9904 }
9905 else
9906 {
9907 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
9908 {
9909 case 0: return FNIEMOP_CALL_1(iemOp_fild_m16i, bRm);
9910 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m16i, bRm);
9911 case 2: return FNIEMOP_CALL_1(iemOp_fist_m16i, bRm);
9912 case 3: return FNIEMOP_CALL_1(iemOp_fistp_m16i, bRm);
9913 case 4: return FNIEMOP_CALL_1(iemOp_fbld_m80d, bRm);
9914 case 5: return FNIEMOP_CALL_1(iemOp_fild_m64i, bRm);
9915 case 6: return FNIEMOP_CALL_1(iemOp_fbstp_m80d, bRm);
9916 case 7: return FNIEMOP_CALL_1(iemOp_fistp_m64i, bRm);
9917 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9918 }
9919 }
9920}
9921
9922
9923/**
9924 * @opcode 0xe0
9925 */
9926FNIEMOP_DEF(iemOp_loopne_Jb)
9927{
9928 IEMOP_MNEMONIC(loopne_Jb, "loopne Jb");
9929 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
9930 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9931 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9932
9933 switch (pVCpu->iem.s.enmEffAddrMode)
9934 {
9935 case IEMMODE_16BIT:
9936 IEM_MC_BEGIN(0,0);
9937 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
9938 IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
9939 IEM_MC_REL_JMP_S8(i8Imm);
9940 } IEM_MC_ELSE() {
9941 IEM_MC_ADVANCE_RIP();
9942 } IEM_MC_ENDIF();
9943 IEM_MC_END();
9944 return VINF_SUCCESS;
9945
9946 case IEMMODE_32BIT:
9947 IEM_MC_BEGIN(0,0);
9948 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
9949 IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
9950 IEM_MC_REL_JMP_S8(i8Imm);
9951 } IEM_MC_ELSE() {
9952 IEM_MC_ADVANCE_RIP();
9953 } IEM_MC_ENDIF();
9954 IEM_MC_END();
9955 return VINF_SUCCESS;
9956
9957 case IEMMODE_64BIT:
9958 IEM_MC_BEGIN(0,0);
9959 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
9960 IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
9961 IEM_MC_REL_JMP_S8(i8Imm);
9962 } IEM_MC_ELSE() {
9963 IEM_MC_ADVANCE_RIP();
9964 } IEM_MC_ENDIF();
9965 IEM_MC_END();
9966 return VINF_SUCCESS;
9967
9968 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9969 }
9970}
9971
9972
9973/**
9974 * @opcode 0xe1
9975 */
9976FNIEMOP_DEF(iemOp_loope_Jb)
9977{
9978 IEMOP_MNEMONIC(loope_Jb, "loope Jb");
9979 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
9980 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9981 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9982
9983 switch (pVCpu->iem.s.enmEffAddrMode)
9984 {
9985 case IEMMODE_16BIT:
9986 IEM_MC_BEGIN(0,0);
9987 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
9988 IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
9989 IEM_MC_REL_JMP_S8(i8Imm);
9990 } IEM_MC_ELSE() {
9991 IEM_MC_ADVANCE_RIP();
9992 } IEM_MC_ENDIF();
9993 IEM_MC_END();
9994 return VINF_SUCCESS;
9995
9996 case IEMMODE_32BIT:
9997 IEM_MC_BEGIN(0,0);
9998 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
9999 IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
10000 IEM_MC_REL_JMP_S8(i8Imm);
10001 } IEM_MC_ELSE() {
10002 IEM_MC_ADVANCE_RIP();
10003 } IEM_MC_ENDIF();
10004 IEM_MC_END();
10005 return VINF_SUCCESS;
10006
10007 case IEMMODE_64BIT:
10008 IEM_MC_BEGIN(0,0);
10009 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
10010 IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
10011 IEM_MC_REL_JMP_S8(i8Imm);
10012 } IEM_MC_ELSE() {
10013 IEM_MC_ADVANCE_RIP();
10014 } IEM_MC_ENDIF();
10015 IEM_MC_END();
10016 return VINF_SUCCESS;
10017
10018 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10019 }
10020}
10021
10022
10023/**
10024 * @opcode 0xe2
10025 */
10026FNIEMOP_DEF(iemOp_loop_Jb)
10027{
10028 IEMOP_MNEMONIC(loop_Jb, "loop Jb");
10029 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10030 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10031 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10032
10033 /** @todo Check out the #GP case if EIP < CS.Base or EIP > CS.Limit when
10034 * using the 32-bit operand size override. How can that be restarted? See
10035 * weird pseudo code in intel manual. */
10036 switch (pVCpu->iem.s.enmEffAddrMode)
10037 {
10038 case IEMMODE_16BIT:
10039 IEM_MC_BEGIN(0,0);
10040 if (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) != i8Imm)
10041 {
10042 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
10043 IEM_MC_IF_CX_IS_NZ() {
10044 IEM_MC_REL_JMP_S8(i8Imm);
10045 } IEM_MC_ELSE() {
10046 IEM_MC_ADVANCE_RIP();
10047 } IEM_MC_ENDIF();
10048 }
10049 else
10050 {
10051 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
10052 IEM_MC_ADVANCE_RIP();
10053 }
10054 IEM_MC_END();
10055 return VINF_SUCCESS;
10056
10057 case IEMMODE_32BIT:
10058 IEM_MC_BEGIN(0,0);
10059 if (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) != i8Imm)
10060 {
10061 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
10062 IEM_MC_IF_ECX_IS_NZ() {
10063 IEM_MC_REL_JMP_S8(i8Imm);
10064 } IEM_MC_ELSE() {
10065 IEM_MC_ADVANCE_RIP();
10066 } IEM_MC_ENDIF();
10067 }
10068 else
10069 {
10070 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
10071 IEM_MC_ADVANCE_RIP();
10072 }
10073 IEM_MC_END();
10074 return VINF_SUCCESS;
10075
10076 case IEMMODE_64BIT:
10077 IEM_MC_BEGIN(0,0);
10078 if (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) != i8Imm)
10079 {
10080 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
10081 IEM_MC_IF_RCX_IS_NZ() {
10082 IEM_MC_REL_JMP_S8(i8Imm);
10083 } IEM_MC_ELSE() {
10084 IEM_MC_ADVANCE_RIP();
10085 } IEM_MC_ENDIF();
10086 }
10087 else
10088 {
10089 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
10090 IEM_MC_ADVANCE_RIP();
10091 }
10092 IEM_MC_END();
10093 return VINF_SUCCESS;
10094
10095 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10096 }
10097}
10098
10099
10100/**
10101 * @opcode 0xe3
10102 */
10103FNIEMOP_DEF(iemOp_jecxz_Jb)
10104{
10105 IEMOP_MNEMONIC(jecxz_Jb, "jecxz Jb");
10106 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10107 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10108 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10109
10110 switch (pVCpu->iem.s.enmEffAddrMode)
10111 {
10112 case IEMMODE_16BIT:
10113 IEM_MC_BEGIN(0,0);
10114 IEM_MC_IF_CX_IS_NZ() {
10115 IEM_MC_ADVANCE_RIP();
10116 } IEM_MC_ELSE() {
10117 IEM_MC_REL_JMP_S8(i8Imm);
10118 } IEM_MC_ENDIF();
10119 IEM_MC_END();
10120 return VINF_SUCCESS;
10121
10122 case IEMMODE_32BIT:
10123 IEM_MC_BEGIN(0,0);
10124 IEM_MC_IF_ECX_IS_NZ() {
10125 IEM_MC_ADVANCE_RIP();
10126 } IEM_MC_ELSE() {
10127 IEM_MC_REL_JMP_S8(i8Imm);
10128 } IEM_MC_ENDIF();
10129 IEM_MC_END();
10130 return VINF_SUCCESS;
10131
10132 case IEMMODE_64BIT:
10133 IEM_MC_BEGIN(0,0);
10134 IEM_MC_IF_RCX_IS_NZ() {
10135 IEM_MC_ADVANCE_RIP();
10136 } IEM_MC_ELSE() {
10137 IEM_MC_REL_JMP_S8(i8Imm);
10138 } IEM_MC_ENDIF();
10139 IEM_MC_END();
10140 return VINF_SUCCESS;
10141
10142 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10143 }
10144}
10145
10146
/** Opcode 0xe4.
 * IN AL, imm8: reads one byte from the immediate I/O port into AL. */
FNIEMOP_DEF(iemOp_in_AL_Ib)
{
    IEMOP_MNEMONIC(in_AL_Ib, "in AL,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, 1);
}
10155
10156
/** Opcode 0xe5.
 * IN eAX, imm8: reads 2 or 4 bytes (per operand size) from the immediate
 * I/O port into AX/EAX. */
FNIEMOP_DEF(iemOp_in_eAX_Ib)
{
    IEMOP_MNEMONIC(in_eAX_Ib, "in eAX,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
10165
10166
/** Opcode 0xe6.
 * OUT imm8, AL: writes one byte from AL to the immediate I/O port. */
FNIEMOP_DEF(iemOp_out_Ib_AL)
{
    IEMOP_MNEMONIC(out_Ib_AL, "out Ib,AL");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, 1);
}
10175
10176
/** Opcode 0xe7.
 * OUT imm8, eAX: writes 2 or 4 bytes (per operand size) from AX/EAX to the
 * immediate I/O port. */
FNIEMOP_DEF(iemOp_out_Ib_eAX)
{
    IEMOP_MNEMONIC(out_Ib_eAX, "out Ib,eAX");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
10185
10186
10187/**
10188 * @opcode 0xe8
10189 */
10190FNIEMOP_DEF(iemOp_call_Jv)
10191{
10192 IEMOP_MNEMONIC(call_Jv, "call Jv");
10193 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10194 switch (pVCpu->iem.s.enmEffOpSize)
10195 {
10196 case IEMMODE_16BIT:
10197 {
10198 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
10199 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_16, (int16_t)u16Imm);
10200 }
10201
10202 case IEMMODE_32BIT:
10203 {
10204 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
10205 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_32, (int32_t)u32Imm);
10206 }
10207
10208 case IEMMODE_64BIT:
10209 {
10210 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
10211 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_64, u64Imm);
10212 }
10213
10214 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10215 }
10216}
10217
10218
10219/**
10220 * @opcode 0xe9
10221 */
10222FNIEMOP_DEF(iemOp_jmp_Jv)
10223{
10224 IEMOP_MNEMONIC(jmp_Jv, "jmp Jv");
10225 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10226 switch (pVCpu->iem.s.enmEffOpSize)
10227 {
10228 case IEMMODE_16BIT:
10229 {
10230 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
10231 IEM_MC_BEGIN(0, 0);
10232 IEM_MC_REL_JMP_S16(i16Imm);
10233 IEM_MC_END();
10234 return VINF_SUCCESS;
10235 }
10236
10237 case IEMMODE_64BIT:
10238 case IEMMODE_32BIT:
10239 {
10240 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
10241 IEM_MC_BEGIN(0, 0);
10242 IEM_MC_REL_JMP_S32(i32Imm);
10243 IEM_MC_END();
10244 return VINF_SUCCESS;
10245 }
10246
10247 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10248 }
10249}
10250
10251
10252/**
10253 * @opcode 0xea
10254 */
10255FNIEMOP_DEF(iemOp_jmp_Ap)
10256{
10257 IEMOP_MNEMONIC(jmp_Ap, "jmp Ap");
10258 IEMOP_HLP_NO_64BIT();
10259
10260 /* Decode the far pointer address and pass it on to the far call C implementation. */
10261 uint32_t offSeg;
10262 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
10263 IEM_OPCODE_GET_NEXT_U32(&offSeg);
10264 else
10265 IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
10266 uint16_t uSel; IEM_OPCODE_GET_NEXT_U16(&uSel);
10267 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10268 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_FarJmp, uSel, offSeg, pVCpu->iem.s.enmEffOpSize);
10269}
10270
10271
10272/**
10273 * @opcode 0xeb
10274 */
10275FNIEMOP_DEF(iemOp_jmp_Jb)
10276{
10277 IEMOP_MNEMONIC(jmp_Jb, "jmp Jb");
10278 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10279 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10280 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10281
10282 IEM_MC_BEGIN(0, 0);
10283 IEM_MC_REL_JMP_S8(i8Imm);
10284 IEM_MC_END();
10285 return VINF_SUCCESS;
10286}
10287
10288
/** Opcode 0xec - in AL,DX: read one byte from the port in DX into AL. */
FNIEMOP_DEF(iemOp_in_AL_DX)
{
    IEMOP_MNEMONIC(in_AL_DX, "in AL,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, 1 /* access width in bytes */);
}
10296
10297
/** Opcode 0xed - in eAX,DX: read a word/dword from the port in DX.
 * NOTE(review): the identifier lacks the 'in_' prefix used by its siblings
 * (iemOp_in_AL_DX); left as-is since the opcode table references this name. */
FNIEMOP_DEF(iemOp_eAX_DX)
{
    IEMOP_MNEMONIC(in_eAX_DX, "in eAX,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Access width: 2 bytes with 16-bit operand size, else 4 (64-bit uses 4 too). */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
10305
10306
/** Opcode 0xee - out DX,AL: write the byte in AL to the port in DX. */
FNIEMOP_DEF(iemOp_out_DX_AL)
{
    IEMOP_MNEMONIC(out_DX_AL, "out DX,AL");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, 1 /* access width in bytes */);
}
10314
10315
/** Opcode 0xef - out DX,eAX: write a word/dword from eAX to the port in DX. */
FNIEMOP_DEF(iemOp_out_DX_eAX)
{
    IEMOP_MNEMONIC(out_DX_eAX, "out DX,eAX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Access width: 2 bytes with 16-bit operand size, else 4. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
10323
10324
10325/**
10326 * @opcode 0xf0
10327 */
10328FNIEMOP_DEF(iemOp_lock)
10329{
10330 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("lock");
10331 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_LOCK;
10332
10333 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
10334 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
10335}
10336
10337
10338/**
10339 * @opcode 0xf1
10340 */
10341FNIEMOP_DEF(iemOp_int1)
10342{
10343 IEMOP_MNEMONIC(int1, "int1"); /* icebp */
10344 IEMOP_HLP_MIN_386(); /** @todo does not generate #UD on 286, or so they say... */
10345 /** @todo testcase! */
10346 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_DB, false /*fIsBpInstr*/);
10347}
10348
10349
10350/**
10351 * @opcode 0xf2
10352 */
10353FNIEMOP_DEF(iemOp_repne)
10354{
10355 /* This overrides any previous REPE prefix. */
10356 pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPZ;
10357 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repne");
10358 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPNZ;
10359
10360 /* For the 4 entry opcode tables, REPNZ overrides any previous
10361 REPZ and operand size prefixes. */
10362 pVCpu->iem.s.idxPrefix = 3;
10363
10364 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
10365 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
10366}
10367
10368
10369/**
10370 * @opcode 0xf3
10371 */
10372FNIEMOP_DEF(iemOp_repe)
10373{
10374 /* This overrides any previous REPNE prefix. */
10375 pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPNZ;
10376 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repe");
10377 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPZ;
10378
10379 /* For the 4 entry opcode tables, REPNZ overrides any previous
10380 REPNZ and operand size prefixes. */
10381 pVCpu->iem.s.idxPrefix = 2;
10382
10383 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
10384 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
10385}
10386
10387
10388/**
10389 * @opcode 0xf4
10390 */
10391FNIEMOP_DEF(iemOp_hlt)
10392{
10393 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10394 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_hlt);
10395}
10396
10397
10398/**
10399 * @opcode 0xf5
10400 */
10401FNIEMOP_DEF(iemOp_cmc)
10402{
10403 IEMOP_MNEMONIC(cmc, "cmc");
10404 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10405 IEM_MC_BEGIN(0, 0);
10406 IEM_MC_FLIP_EFL_BIT(X86_EFL_CF);
10407 IEM_MC_ADVANCE_RIP();
10408 IEM_MC_END();
10409 return VINF_SUCCESS;
10410}
10411
10412
10413/**
10414 * Common implementation of 'inc/dec/not/neg Eb'.
10415 *
10416 * @param bRm The RM byte.
10417 * @param pImpl The instruction implementation.
10418 */
10419FNIEMOP_DEF_2(iemOpCommonUnaryEb, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
10420{
10421 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10422 {
10423 /* register access */
10424 IEM_MC_BEGIN(2, 0);
10425 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
10426 IEM_MC_ARG(uint32_t *, pEFlags, 1);
10427 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10428 IEM_MC_REF_EFLAGS(pEFlags);
10429 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
10430 IEM_MC_ADVANCE_RIP();
10431 IEM_MC_END();
10432 }
10433 else
10434 {
10435 /* memory access. */
10436 IEM_MC_BEGIN(2, 2);
10437 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
10438 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
10439 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10440
10441 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10442 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10443 IEM_MC_FETCH_EFLAGS(EFlags);
10444 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10445 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
10446 else
10447 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU8, pu8Dst, pEFlags);
10448
10449 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
10450 IEM_MC_COMMIT_EFLAGS(EFlags);
10451 IEM_MC_ADVANCE_RIP();
10452 IEM_MC_END();
10453 }
10454 return VINF_SUCCESS;
10455}
10456
10457
10458/**
10459 * Common implementation of 'inc/dec/not/neg Ev'.
10460 *
10461 * @param bRm The RM byte.
10462 * @param pImpl The instruction implementation.
10463 */
10464FNIEMOP_DEF_2(iemOpCommonUnaryEv, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
10465{
10466 /* Registers are handled by a common worker. */
10467 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10468 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, pImpl, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10469
10470 /* Memory we do here. */
10471 switch (pVCpu->iem.s.enmEffOpSize)
10472 {
10473 case IEMMODE_16BIT:
10474 IEM_MC_BEGIN(2, 2);
10475 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10476 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
10477 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10478
10479 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10480 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10481 IEM_MC_FETCH_EFLAGS(EFlags);
10482 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10483 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
10484 else
10485 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU16, pu16Dst, pEFlags);
10486
10487 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
10488 IEM_MC_COMMIT_EFLAGS(EFlags);
10489 IEM_MC_ADVANCE_RIP();
10490 IEM_MC_END();
10491 return VINF_SUCCESS;
10492
10493 case IEMMODE_32BIT:
10494 IEM_MC_BEGIN(2, 2);
10495 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10496 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
10497 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10498
10499 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10500 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10501 IEM_MC_FETCH_EFLAGS(EFlags);
10502 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10503 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
10504 else
10505 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU32, pu32Dst, pEFlags);
10506
10507 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
10508 IEM_MC_COMMIT_EFLAGS(EFlags);
10509 IEM_MC_ADVANCE_RIP();
10510 IEM_MC_END();
10511 return VINF_SUCCESS;
10512
10513 case IEMMODE_64BIT:
10514 IEM_MC_BEGIN(2, 2);
10515 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10516 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
10517 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10518
10519 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10520 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10521 IEM_MC_FETCH_EFLAGS(EFlags);
10522 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10523 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
10524 else
10525 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU64, pu64Dst, pEFlags);
10526
10527 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
10528 IEM_MC_COMMIT_EFLAGS(EFlags);
10529 IEM_MC_ADVANCE_RIP();
10530 IEM_MC_END();
10531 return VINF_SUCCESS;
10532
10533 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10534 }
10535}
10536
10537
/** Opcode 0xf6 /0 - test Eb,Ib.
 * TEST does not write back the destination, hence the read-only memory
 * mapping in the memory path. */
FNIEMOP_DEF_1(iemOp_grp3_test_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC(test_Eb_Ib, "test Eb,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,       pu8Dst,                 0);
        IEM_MC_ARG_CONST(uint8_t,   u8Src,/*=*/u8Imm,       1);
        IEM_MC_ARG(uint32_t *,      pEFlags,                2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,       pu8Dst,          0);
        IEM_MC_ARG(uint8_t,         u8Src,           1);
        IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

        /* One immediate byte follows the ModR/M encoding. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_ASSIGN(u8Src, u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_R);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
10584
10585
/** Opcode 0xf7 /0 - test Ev,Iv.
 * TEST only updates EFLAGS; memory operands are mapped read-only and the
 * high dword of 32-bit register operands is not cleared (no write-back).
 * The 64-bit immediate is a 32-bit value sign-extended to 64 bits. */
FNIEMOP_DEF_1(iemOp_grp3_test_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(test_Ev_Iv, "test Ev,Iv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                0);
                IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/u16Imm,     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                0);
                IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/u32Imm,     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
                /* No clearing the high dword here - test doesn't write back the result. */
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                0);
                IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/u64Imm,     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,      pu16Dst,         0);
                IEM_MC_ARG(uint16_t,        u16Src,          1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                /* Two immediate bytes follow the ModR/M encoding. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,      pu32Dst,         0);
                IEM_MC_ARG(uint32_t,        u32Src,          1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                /* Four immediate bytes follow the ModR/M encoding. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,      pu64Dst,         0);
                IEM_MC_ARG(uint64_t,        u64Src,          1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                /* The immediate is still only 4 bytes (sign-extended). */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
10725
10726
/** Opcode 0xf6 /4, /5, /6 and /7 - mul/imul/div/idiv Eb.
 * The byte worker operates on AX (implicit destination).  A non-zero worker
 * return code raises \#DE; otherwise RIP is advanced.  EFLAGS-undefinedness
 * verification is done by the caller (iemOp_Grp3_Eb). */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEb, uint8_t, bRm, PFNIEMAIMPLMULDIVU8, pfnU8)
{
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_ARG(uint16_t *,      pu16AX,     0);
        IEM_MC_ARG(uint8_t,         u8Value,    1);
        IEM_MC_ARG(uint32_t *,      pEFlags,    2);
        IEM_MC_LOCAL(int32_t,       rc);

        IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        /* rc != 0 signals a divide error from the worker. */
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *,      pu16AX,     0);
        IEM_MC_ARG(uint8_t,         u8Value,    1);
        IEM_MC_ARG(uint32_t *,      pEFlags,    2);
        IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
        IEM_MC_LOCAL(int32_t,       rc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
10778
10779
10780/** Opcode 0xf7 /4, /5, /6 and /7. */
10781FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEv, uint8_t, bRm, PCIEMOPMULDIVSIZES, pImpl)
10782{
10783 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
10784
10785 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10786 {
10787 /* register access */
10788 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10789 switch (pVCpu->iem.s.enmEffOpSize)
10790 {
10791 case IEMMODE_16BIT:
10792 {
10793 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10794 IEM_MC_BEGIN(4, 1);
10795 IEM_MC_ARG(uint16_t *, pu16AX, 0);
10796 IEM_MC_ARG(uint16_t *, pu16DX, 1);
10797 IEM_MC_ARG(uint16_t, u16Value, 2);
10798 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10799 IEM_MC_LOCAL(int32_t, rc);
10800
10801 IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10802 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
10803 IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
10804 IEM_MC_REF_EFLAGS(pEFlags);
10805 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
10806 IEM_MC_IF_LOCAL_IS_Z(rc) {
10807 IEM_MC_ADVANCE_RIP();
10808 } IEM_MC_ELSE() {
10809 IEM_MC_RAISE_DIVIDE_ERROR();
10810 } IEM_MC_ENDIF();
10811
10812 IEM_MC_END();
10813 return VINF_SUCCESS;
10814 }
10815
10816 case IEMMODE_32BIT:
10817 {
10818 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10819 IEM_MC_BEGIN(4, 1);
10820 IEM_MC_ARG(uint32_t *, pu32AX, 0);
10821 IEM_MC_ARG(uint32_t *, pu32DX, 1);
10822 IEM_MC_ARG(uint32_t, u32Value, 2);
10823 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10824 IEM_MC_LOCAL(int32_t, rc);
10825
10826 IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10827 IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
10828 IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
10829 IEM_MC_REF_EFLAGS(pEFlags);
10830 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
10831 IEM_MC_IF_LOCAL_IS_Z(rc) {
10832 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
10833 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
10834 IEM_MC_ADVANCE_RIP();
10835 } IEM_MC_ELSE() {
10836 IEM_MC_RAISE_DIVIDE_ERROR();
10837 } IEM_MC_ENDIF();
10838
10839 IEM_MC_END();
10840 return VINF_SUCCESS;
10841 }
10842
10843 case IEMMODE_64BIT:
10844 {
10845 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10846 IEM_MC_BEGIN(4, 1);
10847 IEM_MC_ARG(uint64_t *, pu64AX, 0);
10848 IEM_MC_ARG(uint64_t *, pu64DX, 1);
10849 IEM_MC_ARG(uint64_t, u64Value, 2);
10850 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10851 IEM_MC_LOCAL(int32_t, rc);
10852
10853 IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10854 IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
10855 IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
10856 IEM_MC_REF_EFLAGS(pEFlags);
10857 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
10858 IEM_MC_IF_LOCAL_IS_Z(rc) {
10859 IEM_MC_ADVANCE_RIP();
10860 } IEM_MC_ELSE() {
10861 IEM_MC_RAISE_DIVIDE_ERROR();
10862 } IEM_MC_ENDIF();
10863
10864 IEM_MC_END();
10865 return VINF_SUCCESS;
10866 }
10867
10868 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10869 }
10870 }
10871 else
10872 {
10873 /* memory access. */
10874 switch (pVCpu->iem.s.enmEffOpSize)
10875 {
10876 case IEMMODE_16BIT:
10877 {
10878 IEM_MC_BEGIN(4, 2);
10879 IEM_MC_ARG(uint16_t *, pu16AX, 0);
10880 IEM_MC_ARG(uint16_t *, pu16DX, 1);
10881 IEM_MC_ARG(uint16_t, u16Value, 2);
10882 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10883 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10884 IEM_MC_LOCAL(int32_t, rc);
10885
10886 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10887 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10888 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10889 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
10890 IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
10891 IEM_MC_REF_EFLAGS(pEFlags);
10892 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
10893 IEM_MC_IF_LOCAL_IS_Z(rc) {
10894 IEM_MC_ADVANCE_RIP();
10895 } IEM_MC_ELSE() {
10896 IEM_MC_RAISE_DIVIDE_ERROR();
10897 } IEM_MC_ENDIF();
10898
10899 IEM_MC_END();
10900 return VINF_SUCCESS;
10901 }
10902
10903 case IEMMODE_32BIT:
10904 {
10905 IEM_MC_BEGIN(4, 2);
10906 IEM_MC_ARG(uint32_t *, pu32AX, 0);
10907 IEM_MC_ARG(uint32_t *, pu32DX, 1);
10908 IEM_MC_ARG(uint32_t, u32Value, 2);
10909 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10910 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10911 IEM_MC_LOCAL(int32_t, rc);
10912
10913 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10914 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10915 IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10916 IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
10917 IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
10918 IEM_MC_REF_EFLAGS(pEFlags);
10919 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
10920 IEM_MC_IF_LOCAL_IS_Z(rc) {
10921 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
10922 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
10923 IEM_MC_ADVANCE_RIP();
10924 } IEM_MC_ELSE() {
10925 IEM_MC_RAISE_DIVIDE_ERROR();
10926 } IEM_MC_ENDIF();
10927
10928 IEM_MC_END();
10929 return VINF_SUCCESS;
10930 }
10931
10932 case IEMMODE_64BIT:
10933 {
10934 IEM_MC_BEGIN(4, 2);
10935 IEM_MC_ARG(uint64_t *, pu64AX, 0);
10936 IEM_MC_ARG(uint64_t *, pu64DX, 1);
10937 IEM_MC_ARG(uint64_t, u64Value, 2);
10938 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10939 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10940 IEM_MC_LOCAL(int32_t, rc);
10941
10942 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10943 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10944 IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10945 IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
10946 IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
10947 IEM_MC_REF_EFLAGS(pEFlags);
10948 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
10949 IEM_MC_IF_LOCAL_IS_Z(rc) {
10950 IEM_MC_ADVANCE_RIP();
10951 } IEM_MC_ELSE() {
10952 IEM_MC_RAISE_DIVIDE_ERROR();
10953 } IEM_MC_ENDIF();
10954
10955 IEM_MC_END();
10956 return VINF_SUCCESS;
10957 }
10958
10959 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10960 }
10961 }
10962}
10963
10964/**
10965 * @opcode 0xf6
10966 */
10967FNIEMOP_DEF(iemOp_Grp3_Eb)
10968{
10969 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10970 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
10971 {
10972 case 0:
10973 return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
10974 case 1:
10975/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
10976 return IEMOP_RAISE_INVALID_OPCODE();
10977 case 2:
10978 IEMOP_MNEMONIC(not_Eb, "not Eb");
10979 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_not);
10980 case 3:
10981 IEMOP_MNEMONIC(neg_Eb, "neg Eb");
10982 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_neg);
10983 case 4:
10984 IEMOP_MNEMONIC(mul_Eb, "mul Eb");
10985 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
10986 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_mul_u8);
10987 case 5:
10988 IEMOP_MNEMONIC(imul_Eb, "imul Eb");
10989 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
10990 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_imul_u8);
10991 case 6:
10992 IEMOP_MNEMONIC(div_Eb, "div Eb");
10993 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
10994 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_div_u8);
10995 case 7:
10996 IEMOP_MNEMONIC(idiv_Eb, "idiv Eb");
10997 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
10998 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_idiv_u8);
10999 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11000 }
11001}
11002
11003
11004/**
11005 * @opcode 0xf7
11006 */
11007FNIEMOP_DEF(iemOp_Grp3_Ev)
11008{
11009 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11010 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
11011 {
11012 case 0:
11013 return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
11014 case 1:
11015/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
11016 return IEMOP_RAISE_INVALID_OPCODE();
11017 case 2:
11018 IEMOP_MNEMONIC(not_Ev, "not Ev");
11019 return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_not);
11020 case 3:
11021 IEMOP_MNEMONIC(neg_Ev, "neg Ev");
11022 return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_neg);
11023 case 4:
11024 IEMOP_MNEMONIC(mul_Ev, "mul Ev");
11025 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
11026 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_mul);
11027 case 5:
11028 IEMOP_MNEMONIC(imul_Ev, "imul Ev");
11029 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
11030 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_imul);
11031 case 6:
11032 IEMOP_MNEMONIC(div_Ev, "div Ev");
11033 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
11034 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_div);
11035 case 7:
11036 IEMOP_MNEMONIC(idiv_Ev, "idiv Ev");
11037 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
11038 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_idiv);
11039 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11040 }
11041}
11042
11043
11044/**
11045 * @opcode 0xf8
11046 */
11047FNIEMOP_DEF(iemOp_clc)
11048{
11049 IEMOP_MNEMONIC(clc, "clc");
11050 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11051 IEM_MC_BEGIN(0, 0);
11052 IEM_MC_CLEAR_EFL_BIT(X86_EFL_CF);
11053 IEM_MC_ADVANCE_RIP();
11054 IEM_MC_END();
11055 return VINF_SUCCESS;
11056}
11057
11058
11059/**
11060 * @opcode 0xf9
11061 */
11062FNIEMOP_DEF(iemOp_stc)
11063{
11064 IEMOP_MNEMONIC(stc, "stc");
11065 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11066 IEM_MC_BEGIN(0, 0);
11067 IEM_MC_SET_EFL_BIT(X86_EFL_CF);
11068 IEM_MC_ADVANCE_RIP();
11069 IEM_MC_END();
11070 return VINF_SUCCESS;
11071}
11072
11073
11074/**
11075 * @opcode 0xfa
11076 */
11077FNIEMOP_DEF(iemOp_cli)
11078{
11079 IEMOP_MNEMONIC(cli, "cli");
11080 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11081 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cli);
11082}
11083
11084
/**
 * @opcode 0xfb
 *
 * STI - set the interrupt flag; privilege checks and the interrupt shadow
 * are handled by the C implementation.
 */
FNIEMOP_DEF(iemOp_sti)
{
    IEMOP_MNEMONIC(sti, "sti");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sti);
}
11091
11092
11093/**
11094 * @opcode 0xfc
11095 */
11096FNIEMOP_DEF(iemOp_cld)
11097{
11098 IEMOP_MNEMONIC(cld, "cld");
11099 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11100 IEM_MC_BEGIN(0, 0);
11101 IEM_MC_CLEAR_EFL_BIT(X86_EFL_DF);
11102 IEM_MC_ADVANCE_RIP();
11103 IEM_MC_END();
11104 return VINF_SUCCESS;
11105}
11106
11107
11108/**
11109 * @opcode 0xfd
11110 */
11111FNIEMOP_DEF(iemOp_std)
11112{
11113 IEMOP_MNEMONIC(std, "std");
11114 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11115 IEM_MC_BEGIN(0, 0);
11116 IEM_MC_SET_EFL_BIT(X86_EFL_DF);
11117 IEM_MC_ADVANCE_RIP();
11118 IEM_MC_END();
11119 return VINF_SUCCESS;
11120}
11121
11122
11123/**
11124 * @opcode 0xfe
11125 */
11126FNIEMOP_DEF(iemOp_Grp4)
11127{
11128 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11129 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
11130 {
11131 case 0:
11132 IEMOP_MNEMONIC(inc_Eb, "inc Eb");
11133 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_inc);
11134 case 1:
11135 IEMOP_MNEMONIC(dec_Eb, "dec Eb");
11136 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_dec);
11137 default:
11138 IEMOP_MNEMONIC(grp4_ud, "grp4-ud");
11139 return IEMOP_RAISE_INVALID_OPCODE();
11140 }
11141}
11142
11143
11144/**
11145 * Opcode 0xff /2.
11146 * @param bRm The RM byte.
11147 */
11148FNIEMOP_DEF_1(iemOp_Grp5_calln_Ev, uint8_t, bRm)
11149{
11150 IEMOP_MNEMONIC(calln_Ev, "calln Ev");
11151 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
11152
11153 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
11154 {
11155 /* The new RIP is taken from a register. */
11156 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11157 switch (pVCpu->iem.s.enmEffOpSize)
11158 {
11159 case IEMMODE_16BIT:
11160 IEM_MC_BEGIN(1, 0);
11161 IEM_MC_ARG(uint16_t, u16Target, 0);
11162 IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11163 IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
11164 IEM_MC_END()
11165 return VINF_SUCCESS;
11166
11167 case IEMMODE_32BIT:
11168 IEM_MC_BEGIN(1, 0);
11169 IEM_MC_ARG(uint32_t, u32Target, 0);
11170 IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11171 IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
11172 IEM_MC_END()
11173 return VINF_SUCCESS;
11174
11175 case IEMMODE_64BIT:
11176 IEM_MC_BEGIN(1, 0);
11177 IEM_MC_ARG(uint64_t, u64Target, 0);
11178 IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11179 IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
11180 IEM_MC_END()
11181 return VINF_SUCCESS;
11182
11183 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11184 }
11185 }
11186 else
11187 {
11188 /* The new RIP is taken from a register. */
11189 switch (pVCpu->iem.s.enmEffOpSize)
11190 {
11191 case IEMMODE_16BIT:
11192 IEM_MC_BEGIN(1, 1);
11193 IEM_MC_ARG(uint16_t, u16Target, 0);
11194 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11195 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11196 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11197 IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11198 IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
11199 IEM_MC_END()
11200 return VINF_SUCCESS;
11201
11202 case IEMMODE_32BIT:
11203 IEM_MC_BEGIN(1, 1);
11204 IEM_MC_ARG(uint32_t, u32Target, 0);
11205 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11206 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11207 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11208 IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11209 IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
11210 IEM_MC_END()
11211 return VINF_SUCCESS;
11212
11213 case IEMMODE_64BIT:
11214 IEM_MC_BEGIN(1, 1);
11215 IEM_MC_ARG(uint64_t, u64Target, 0);
11216 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11217 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11218 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11219 IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11220 IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
11221 IEM_MC_END()
11222 return VINF_SUCCESS;
11223
11224 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11225 }
11226 }
11227}
11228
/** Signature of a far-branch (call/jmp) C implementation taking a selector,
 *  an offset and the effective operand size. */
typedef IEM_CIMPL_DECL_TYPE_3(FNIEMCIMPLFARBRANCH, uint16_t, uSel, uint64_t, offSeg, IEMMODE, enmOpSize);

/**
 * Common worker for the far indirect call/jmp m16:16/m16:32/m16:64 forms
 * (0xff /3 and /5).  Register operands are invalid; the far pointer is
 * always loaded from memory: offset first, selector at the following
 * displacement (+2/+4/+8).
 */
FNIEMOP_DEF_2(iemOpHlp_Grp5_far_Ep, uint8_t, bRm, FNIEMCIMPLFARBRANCH *, pfnCImpl)
{
    /* Registers? How?? */
    if (RT_LIKELY((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)))
    { /* likely */ }
    else
        return IEMOP_RAISE_INVALID_OPCODE(); /* callf eax is not legal */

    /* Far pointer loaded from memory. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(3, 1);
            IEM_MC_ARG(uint16_t,        u16Sel,                         0);
            IEM_MC_ARG(uint16_t,        offSeg,                         1);
            IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSize, IEMMODE_16BIT,    2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2);
            IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            /** @todo testcase: AMD does not seem to believe in the case (see bs-cpu-xcpt-1)
             *        and will apparently ignore REX.W, at least for the jmp far qword [rsp]
             *        and call far qword [rsp] encodings. */
            if (!IEM_IS_GUEST_CPU_AMD(pVCpu))
            {
                /* Intel: m16:64 - 64-bit offset followed by the selector at +8.
                 * NOTE(review): the enmEffOpSize constant here is IEMMODE_16BIT
                 * while offSeg is fetched as 64 bits - verify this is the value
                 * the far-branch CImpl expects for this encoding. */
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t,        u16Sel,                         0);
                IEM_MC_ARG(uint64_t,        offSeg,                         1);
                IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSize, IEMMODE_16BIT,    2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 8);
                IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
                IEM_MC_END();
                return VINF_SUCCESS;
            }
            /* AMD falls thru. */
            /* fall thru */

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(3, 1);
            IEM_MC_ARG(uint16_t,        u16Sel,                         0);
            IEM_MC_ARG(uint32_t,        offSeg,                         1);
            IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSize, IEMMODE_32BIT,    2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4);
            IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11295
11296
/**
 * Opcode 0xff /3 - far call with the sel:off pointer loaded from memory (Ep).
 *
 * Register forms and the actual work are handled by the common far-branch
 * worker, which dispatches to iemCImpl_callf.
 *
 * @param   bRm     The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_callf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC(callf_Ep, "callf Ep");
    return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_callf);
}
11306
11307
/**
 * Opcode 0xff /4 - near jump with the target taken from a register or memory
 * operand (Ev).
 *
 * Defaults to 64-bit operand size in long mode; the new IP/EIP/RIP is set
 * directly without pushing a return address.
 *
 * @param   bRm     The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpn_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(jmpn_Ev, "jmpn Ev");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* The new RIP is taken from a register. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_SET_RIP_U16(u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_SET_RIP_U32(u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_SET_RIP_U64(u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from a memory location. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U16(u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U32(u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U64(u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
11392
11393
/**
 * Opcode 0xff /5 - far jump with the sel:off pointer loaded from memory (Ep).
 *
 * Register forms and the actual work are handled by the common far-branch
 * worker, which dispatches to iemCImpl_FarJmp.
 *
 * @param   bRm     The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC(jmpf_Ep, "jmpf Ep");
    return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_FarJmp);
}
11403
11404
/**
 * Opcode 0xff /6 - push a register or memory operand (Ev).
 *
 * Register operands are forwarded to the common push-greg worker; memory
 * operands are fetched here and pushed directly.  Defaults to 64-bit operand
 * size in long mode.
 *
 * @param   bRm     The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_push_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(push_Ev, "push Ev");

    /* Registers are handled by a common worker. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(iemOpCommonPushGReg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);

    /* Memory we do here. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint16_t, u16Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U16(u16Src);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U32(u32Src);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U64(u64Src);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11460
11461
/**
 * @opcode 0xff
 *
 * Group 5 dispatcher: decodes the ModR/M byte and routes on the /reg field
 * (inc, dec, calln, callf, jmpn, jmpf, push; /7 is undefined and raises \#UD).
 */
FNIEMOP_DEF(iemOp_Grp5)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            IEMOP_MNEMONIC(inc_Ev, "inc Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_inc);
        case 1:
            IEMOP_MNEMONIC(dec_Ev, "dec Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_dec);
        case 2:
            return FNIEMOP_CALL_1(iemOp_Grp5_calln_Ev, bRm);
        case 3:
            return FNIEMOP_CALL_1(iemOp_Grp5_callf_Ep, bRm);
        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp5_jmpn_Ev, bRm);
        case 5:
            return FNIEMOP_CALL_1(iemOp_Grp5_jmpf_Ep, bRm);
        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp5_push_Ev, bRm);
        case 7:
            /* /7 is not assigned; invalid opcode. */
            IEMOP_MNEMONIC(grp5_ud, "grp5-ud");
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    /* The 3-bit /reg field is exhaustively handled above. */
    AssertFailedReturn(VERR_IEM_IPE_3);
}
11492
11493
11494
/**
 * The one byte opcode dispatch table, indexed by the opcode byte (0x00-0xff).
 * Each entry points at the decoder/emulator worker for that opcode; prefix
 * bytes and escape bytes have their own entries.
 */
const PFNIEMOP g_apfnOneByteMap[256] =
{
    /* 0x00 */  iemOp_add_Eb_Gb,        iemOp_add_Ev_Gv,        iemOp_add_Gb_Eb,        iemOp_add_Gv_Ev,
    /* 0x04 */  iemOp_add_Al_Ib,        iemOp_add_eAX_Iz,       iemOp_push_ES,          iemOp_pop_ES,
    /* 0x08 */  iemOp_or_Eb_Gb,         iemOp_or_Ev_Gv,         iemOp_or_Gb_Eb,         iemOp_or_Gv_Ev,
    /* 0x0c */  iemOp_or_Al_Ib,         iemOp_or_eAX_Iz,        iemOp_push_CS,          iemOp_2byteEscape,
    /* 0x10 */  iemOp_adc_Eb_Gb,        iemOp_adc_Ev_Gv,        iemOp_adc_Gb_Eb,        iemOp_adc_Gv_Ev,
    /* 0x14 */  iemOp_adc_Al_Ib,        iemOp_adc_eAX_Iz,       iemOp_push_SS,          iemOp_pop_SS,
    /* 0x18 */  iemOp_sbb_Eb_Gb,        iemOp_sbb_Ev_Gv,        iemOp_sbb_Gb_Eb,        iemOp_sbb_Gv_Ev,
    /* 0x1c */  iemOp_sbb_Al_Ib,        iemOp_sbb_eAX_Iz,       iemOp_push_DS,          iemOp_pop_DS,
    /* 0x20 */  iemOp_and_Eb_Gb,        iemOp_and_Ev_Gv,        iemOp_and_Gb_Eb,        iemOp_and_Gv_Ev,
    /* 0x24 */  iemOp_and_Al_Ib,        iemOp_and_eAX_Iz,       iemOp_seg_ES,           iemOp_daa,
    /* 0x28 */  iemOp_sub_Eb_Gb,        iemOp_sub_Ev_Gv,        iemOp_sub_Gb_Eb,        iemOp_sub_Gv_Ev,
    /* 0x2c */  iemOp_sub_Al_Ib,        iemOp_sub_eAX_Iz,       iemOp_seg_CS,           iemOp_das,
    /* 0x30 */  iemOp_xor_Eb_Gb,        iemOp_xor_Ev_Gv,        iemOp_xor_Gb_Eb,        iemOp_xor_Gv_Ev,
    /* 0x34 */  iemOp_xor_Al_Ib,        iemOp_xor_eAX_Iz,       iemOp_seg_SS,           iemOp_aaa,
    /* 0x38 */  iemOp_cmp_Eb_Gb,        iemOp_cmp_Ev_Gv,        iemOp_cmp_Gb_Eb,        iemOp_cmp_Gv_Ev,
    /* 0x3c */  iemOp_cmp_Al_Ib,        iemOp_cmp_eAX_Iz,       iemOp_seg_DS,           iemOp_aas,
    /* 0x40 */  iemOp_inc_eAX,          iemOp_inc_eCX,          iemOp_inc_eDX,          iemOp_inc_eBX,
    /* 0x44 */  iemOp_inc_eSP,          iemOp_inc_eBP,          iemOp_inc_eSI,          iemOp_inc_eDI,
    /* 0x48 */  iemOp_dec_eAX,          iemOp_dec_eCX,          iemOp_dec_eDX,          iemOp_dec_eBX,
    /* 0x4c */  iemOp_dec_eSP,          iemOp_dec_eBP,          iemOp_dec_eSI,          iemOp_dec_eDI,
    /* 0x50 */  iemOp_push_eAX,         iemOp_push_eCX,         iemOp_push_eDX,         iemOp_push_eBX,
    /* 0x54 */  iemOp_push_eSP,         iemOp_push_eBP,         iemOp_push_eSI,         iemOp_push_eDI,
    /* 0x58 */  iemOp_pop_eAX,          iemOp_pop_eCX,          iemOp_pop_eDX,          iemOp_pop_eBX,
    /* 0x5c */  iemOp_pop_eSP,          iemOp_pop_eBP,          iemOp_pop_eSI,          iemOp_pop_eDI,
    /* 0x60 */  iemOp_pusha,            iemOp_popa__mvex,       iemOp_bound_Gv_Ma__evex, iemOp_arpl_Ew_Gw_movsx_Gv_Ev,
    /* 0x64 */  iemOp_seg_FS,           iemOp_seg_GS,           iemOp_op_size,          iemOp_addr_size,
    /* 0x68 */  iemOp_push_Iz,          iemOp_imul_Gv_Ev_Iz,    iemOp_push_Ib,          iemOp_imul_Gv_Ev_Ib,
    /* 0x6c */  iemOp_insb_Yb_DX,       iemOp_inswd_Yv_DX,      iemOp_outsb_Yb_DX,      iemOp_outswd_Yv_DX,
    /* 0x70 */  iemOp_jo_Jb,            iemOp_jno_Jb,           iemOp_jc_Jb,            iemOp_jnc_Jb,
    /* 0x74 */  iemOp_je_Jb,            iemOp_jne_Jb,           iemOp_jbe_Jb,           iemOp_jnbe_Jb,
    /* 0x78 */  iemOp_js_Jb,            iemOp_jns_Jb,           iemOp_jp_Jb,            iemOp_jnp_Jb,
    /* 0x7c */  iemOp_jl_Jb,            iemOp_jnl_Jb,           iemOp_jle_Jb,           iemOp_jnle_Jb,
    /* 0x80 */  iemOp_Grp1_Eb_Ib_80,    iemOp_Grp1_Ev_Iz,       iemOp_Grp1_Eb_Ib_82,    iemOp_Grp1_Ev_Ib,
    /* 0x84 */  iemOp_test_Eb_Gb,       iemOp_test_Ev_Gv,       iemOp_xchg_Eb_Gb,       iemOp_xchg_Ev_Gv,
    /* 0x88 */  iemOp_mov_Eb_Gb,        iemOp_mov_Ev_Gv,        iemOp_mov_Gb_Eb,        iemOp_mov_Gv_Ev,
    /* 0x8c */  iemOp_mov_Ev_Sw,        iemOp_lea_Gv_M,         iemOp_mov_Sw_Ev,        iemOp_Grp1A__xop,
    /* 0x90 */  iemOp_nop,              iemOp_xchg_eCX_eAX,     iemOp_xchg_eDX_eAX,     iemOp_xchg_eBX_eAX,
    /* 0x94 */  iemOp_xchg_eSP_eAX,     iemOp_xchg_eBP_eAX,     iemOp_xchg_eSI_eAX,     iemOp_xchg_eDI_eAX,
    /* 0x98 */  iemOp_cbw,              iemOp_cwd,              iemOp_call_Ap,          iemOp_wait,
    /* 0x9c */  iemOp_pushf_Fv,         iemOp_popf_Fv,          iemOp_sahf,             iemOp_lahf,
    /* 0xa0 */  iemOp_mov_AL_Ob,        iemOp_mov_rAX_Ov,       iemOp_mov_Ob_AL,        iemOp_mov_Ov_rAX,
    /* 0xa4 */  iemOp_movsb_Xb_Yb,      iemOp_movswd_Xv_Yv,     iemOp_cmpsb_Xb_Yb,      iemOp_cmpswd_Xv_Yv,
    /* 0xa8 */  iemOp_test_AL_Ib,       iemOp_test_eAX_Iz,      iemOp_stosb_Yb_AL,      iemOp_stoswd_Yv_eAX,
    /* 0xac */  iemOp_lodsb_AL_Xb,      iemOp_lodswd_eAX_Xv,    iemOp_scasb_AL_Xb,      iemOp_scaswd_eAX_Xv,
    /* 0xb0 */  iemOp_mov_AL_Ib,        iemOp_CL_Ib,            iemOp_DL_Ib,            iemOp_BL_Ib,
    /* 0xb4 */  iemOp_mov_AH_Ib,        iemOp_CH_Ib,            iemOp_DH_Ib,            iemOp_BH_Ib,
    /* 0xb8 */  iemOp_eAX_Iv,           iemOp_eCX_Iv,           iemOp_eDX_Iv,           iemOp_eBX_Iv,
    /* 0xbc */  iemOp_eSP_Iv,           iemOp_eBP_Iv,           iemOp_eSI_Iv,           iemOp_eDI_Iv,
    /* 0xc0 */  iemOp_Grp2_Eb_Ib,       iemOp_Grp2_Ev_Ib,       iemOp_retn_Iw,          iemOp_retn,
    /* 0xc4 */  iemOp_les_Gv_Mp__vex2,  iemOp_lds_Gv_Mp__vex3,  iemOp_Grp11_Eb_Ib,      iemOp_Grp11_Ev_Iz,
    /* 0xc8 */  iemOp_enter_Iw_Ib,      iemOp_leave,            iemOp_retf_Iw,          iemOp_retf,
    /* 0xcc */  iemOp_int3,             iemOp_int_Ib,           iemOp_into,             iemOp_iret,
    /* 0xd0 */  iemOp_Grp2_Eb_1,        iemOp_Grp2_Ev_1,        iemOp_Grp2_Eb_CL,       iemOp_Grp2_Ev_CL,
    /* 0xd4 */  iemOp_aam_Ib,           iemOp_aad_Ib,           iemOp_salc,             iemOp_xlat,
    /* 0xd8 */  iemOp_EscF0,            iemOp_EscF1,            iemOp_EscF2,            iemOp_EscF3,
    /* 0xdc */  iemOp_EscF4,            iemOp_EscF5,            iemOp_EscF6,            iemOp_EscF7,
    /* 0xe0 */  iemOp_loopne_Jb,        iemOp_loope_Jb,         iemOp_loop_Jb,          iemOp_jecxz_Jb,
    /* 0xe4 */  iemOp_in_AL_Ib,         iemOp_in_eAX_Ib,        iemOp_out_Ib_AL,        iemOp_out_Ib_eAX,
    /* 0xe8 */  iemOp_call_Jv,          iemOp_jmp_Jv,           iemOp_jmp_Ap,           iemOp_jmp_Jb,
    /* 0xec */  iemOp_in_AL_DX,         iemOp_eAX_DX,           iemOp_out_DX_AL,        iemOp_out_DX_eAX,
    /* 0xf0 */  iemOp_lock,             iemOp_int1,             iemOp_repne,            iemOp_repe,
    /* 0xf4 */  iemOp_hlt,              iemOp_cmc,              iemOp_Grp3_Eb,          iemOp_Grp3_Ev,
    /* 0xf8 */  iemOp_clc,              iemOp_stc,              iemOp_cli,              iemOp_sti,
    /* 0xfc */  iemOp_cld,              iemOp_std,              iemOp_Grp4,             iemOp_Grp5,
};
11562
11563
11564/** @} */
11565
Note: See TracBrowser for help on using the repository browser.

© 2025 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette