VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsOneByte.cpp.h@ 66129

Last change on this file since 66129 was 66129, checked in by vboxsync, 8 years ago

IEM: Some OR testcases.

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 378.3 KB
Line 
1/* $Id: IEMAllInstructionsOneByte.cpp.h 66129 2017-03-16 14:10:20Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 */
5
6/*
7 * Copyright (C) 2011-2016 Oracle Corporation
8 *
9 * This file is part of VirtualBox Open Source Edition (OSE), as
10 * available from http://www.215389.xyz. This file is free software;
11 * you can redistribute it and/or modify it under the terms of the GNU
12 * General Public License (GPL) as published by the Free Software
13 * Foundation, in version 2 as it comes in the "COPYING" file of the
14 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
15 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
16 */
17
18
19/*******************************************************************************
20* Global Variables *
21*******************************************************************************/
22extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */
23
24/** @def og_gen General
25 * @{
26 */
27
28/** @def og_gen_arith Arithmetic
29 * @{
30 */
31/** @defgroup og_gen_arith_bin Binary numbers */
32/** @defgroup og_gen_arith_dec Decimal numbers */
33/** @} */
34
35
36
37/** @name One byte opcodes.
38 * @{
39 */
40
41/* Instruction specification format - work in progress: */
42
43/**
44 * @opcode 0x00
45 * @opmnemonic add
46 * @op1 rm:Eb
47 * @op2 reg:Gb
48 * @opmaps one
49 * @openc ModR/M
50 * @opflmodify of,sf,zf,af,pf,cf
51 * @ophints harmless ignores_op_size
52 * @opstats add_Eb_Gb
53 * @opgroup op_gen_arith_bin
54 * @optest op1=1 op2=1 -> op1=2 efl&|=nc,pe,na,nz,pl,nv
55 * @optest efl|=cf op1=1 op2=2 -> op1=3 efl&|=nc,po,na,nz,pl,nv
56 * @optest op1=254 op2=1 -> op1=255 efl&|=nc,po,na,nz,ng,nv
57 * @optest op1=128 op2=128 -> op1=0 efl&|=ov,pl,zf,na,po,cf
58 */
FNIEMOP_DEF(iemOp_add_Eb_Gb)
{
    /* ADD r/m8, r8 (0x00, MR): decode mnemonic, then defer to the common
       byte-sized ModR/M 'rm,reg' worker with the ADD implementation table. */
    IEMOP_MNEMONIC2(MR, ADD, add, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_add);
}
64
65
66/**
67 * @opcode 0x01
68 * @opgroup op_gen_arith_bin
69 * @opflmodify of,sf,zf,af,pf,cf
70 * @optest op1=1 op2=1 -> op1=2 efl&|=nc,pe,na,nz,pl,nv
71 * @optest efl|=cf op1=2 op2=2 -> op1=4 efl&|=nc,pe,na,nz,pl,nv
72 * @optest efl&~=cf op1=-1 op2=1 -> op1=0 efl&|=cf,po,af,zf,pl,nv
73 * @optest op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
74 */
FNIEMOP_DEF(iemOp_add_Ev_Gv)
{
    /* ADD r/m16/32/64, r16/32/64 (0x01, MR): operand-size-dependent form. */
    IEMOP_MNEMONIC2(MR, ADD, add, Ev, Gv, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_add);
}
80
81
82/**
83 * @opcode 0x02
84 * @opgroup op_gen_arith_bin
85 * @opflmodify of,sf,zf,af,pf,cf
86 * @opcopytests iemOp_add_Eb_Gb
87 */
FNIEMOP_DEF(iemOp_add_Gb_Eb)
{
    /* ADD r8, r/m8 (0x02, RM): register destination variant of 0x00. */
    IEMOP_MNEMONIC2(RM, ADD, add, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_add);
}
93
94
95/**
96 * @opcode 0x03
97 * @opgroup op_gen_arith_bin
98 * @opflmodify of,sf,zf,af,pf,cf
99 * @opcopytests iemOp_add_Ev_Gv
100 */
FNIEMOP_DEF(iemOp_add_Gv_Ev)
{
    /* ADD r16/32/64, r/m16/32/64 (0x03, RM): register destination variant of 0x01. */
    IEMOP_MNEMONIC2(RM, ADD, add, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_add);
}
106
107
108/**
109 * @opcode 0x04
110 * @opgroup op_gen_arith_bin
111 * @opflmodify of,sf,zf,af,pf,cf
112 * @opcopytests iemOp_add_Eb_Gb
113 */
FNIEMOP_DEF(iemOp_add_Al_Ib)
{
    /* ADD AL, imm8 (0x04): fixed encoding, byte form ignores the operand-size prefix. */
    IEMOP_MNEMONIC2(FIXED, ADD, add, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_add);
}
119
120
121/**
122 * @opcode 0x05
123 * @opgroup op_gen_arith_bin
124 * @opflmodify of,sf,zf,af,pf,cf
125 * @optest op1=1 op2=1 -> op1=2 efl&|=nv,pl,nz,na,pe
126 * @optest efl|=cf op1=2 op2=2 -> op1=4 efl&|=nc,pe,na,nz,pl,nv
127 * @optest efl&~=cf op1=-1 op2=1 -> op1=0 efl&|=cf,po,af,zf,pl,nv
128 * @optest op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
129 */
FNIEMOP_DEF(iemOp_add_eAX_Iz)
{
    /* ADD rAX, immz (0x05): immediate width follows the effective operand size. */
    IEMOP_MNEMONIC2(FIXED, ADD, add, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_add);
}
135
136
137/**
138 * @opcode 0x06
139 * @opgroup op_stack_sreg
140 */
FNIEMOP_DEF(iemOp_push_ES)
{
    /* PUSH ES (0x06): invalid in 64-bit mode; common segment-register push worker. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, ES, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_ES);
}
147
148
149/**
150 * @opcode 0x07
151 * @opgroup op_stack_sreg
152 */
FNIEMOP_DEF(iemOp_pop_ES)
{
    /* POP ES (0x07): invalid in 64-bit mode; deferred to the C implementation
       because popping a segment register has side effects beyond the MC blocks. */
    IEMOP_MNEMONIC1(FIXED, POP, pop, ES, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_ES, pVCpu->iem.s.enmEffOpSize);
}
160
161
162/**
163 * @opcode 0x08
164 * @opgroup op_gen_arith_bin
165 * @opflmodify of,sf,zf,af,pf,cf
166 * @opflundef af
167 * @opflclear of,cf
168 * @optest op1=7 op2=12 -> op1=15 efl&|=nc,po,na,nz,pl,nv
169 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
170 * @optest op1=0xee op2=0x11 -> op1=0xff efl&|=nc,po,na,nz,ng,nv
171 * @optest op1=0xff op2=0xff -> op1=0xff efl&|=nc,po,na,nz,ng,nv
172 */
FNIEMOP_DEF(iemOp_or_Eb_Gb)
{
    /* OR r/m8, r8 (0x08): AF is architecturally undefined after OR, hence the
       verification-mode exclusion. */
    IEMOP_MNEMONIC2(MR, OR, or, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_or);
}
179
180
/**
182 * @opcode 0x09
183 * @opgroup op_gen_arith_bin
184 * @opflmodify of,sf,zf,af,pf,cf
185 * @opflundef af
186 * @opflclear of,cf
187 * @optest efl|=of,cf op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
188 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
189 * @optest op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
190 * @optest o16 / op1=0x5a5a op2=0xa5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
191 * @optest o32 / op1=0x5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
192 * @optest o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5a5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
193 */
FNIEMOP_DEF(iemOp_or_Ev_Gv)
{
    /* OR r/m16/32/64, r16/32/64 (0x09): AF undefined after OR. */
    IEMOP_MNEMONIC2(MR, OR, or, Ev, Gv, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_or);
}
200
201
202/**
203 * @opcode 0x0a
204 * @opgroup op_gen_arith_bin
205 * @opflmodify of,sf,zf,af,pf,cf
206 * @opflundef af
207 * @opflclear of,cf
208 * @opcopytests iemOp_or_Eb_Gb
209 */
FNIEMOP_DEF(iemOp_or_Gb_Eb)
{
    /* OR r8, r/m8 (0x0a): register destination variant of 0x08. */
    IEMOP_MNEMONIC2(RM, OR, or, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_or);
}
216
217
218/**
219 * @opcode 0x0b
220 * @opgroup op_gen_arith_bin
221 * @opflmodify of,sf,zf,af,pf,cf
222 * @opflundef af
223 * @opflclear of,cf
224 * @opcopytests iemOp_or_Ev_Gv
225 */
FNIEMOP_DEF(iemOp_or_Gv_Ev)
{
    /* OR r16/32/64, r/m16/32/64 (0x0b): register destination variant of 0x09. */
    IEMOP_MNEMONIC2(RM, OR, or, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_or);
}
232
233
234/**
235 * @opcode 0x0c
236 * @opgroup op_gen_arith_bin
237 * @opflmodify of,sf,zf,af,pf,cf
238 * @opflundef af
239 * @opflclear of,cf
240 * @opcopytests iemOp_or_Eb_Gb
241 */
FNIEMOP_DEF(iemOp_or_Al_Ib)
{
    /* OR AL, imm8 (0x0c): fixed encoding, byte form ignores the operand-size prefix. */
    IEMOP_MNEMONIC2(FIXED, OR, or, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_or);
}
248
249
/**
251 * @opcode 0x0d
252 * @opgroup op_gen_arith_bin
253 * @opflmodify of,sf,zf,af,pf,cf
254 * @opflundef af
255 * @opflclear of,cf
256 * @optest efl|=of,cf op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
257 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
258 * @optest op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
259 * @optest o16 / op1=0x5a5a op2=0xa5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
260 * @optest o32 / op1=0x5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
261 * @optest o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
262 * @optest o64 / op1=0x5a5a5a5aa5a5a5a5 op2=0x5a5a5a5a -> op1=0x5a5a5a5affffffff efl&|=nc,po,na,nz,pl,nv
263 */
FNIEMOP_DEF(iemOp_or_eAX_Iz)
{
    /* OR rAX, immz (0x0d): immediate width follows the effective operand size
       (sign-extended to 64 bits in long mode). */
    IEMOP_MNEMONIC2(FIXED, OR, or, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_or);
}
270
271
272/**
273 * @opcode 0x0e
274 * @opgroup op_stack_sreg
275 */
FNIEMOP_DEF(iemOp_push_CS)
{
    /* PUSH CS (0x0e): 8086-era encoding, invalid in 64-bit mode. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_CS);
}
282
283
284/**
285 * @opcode 0x0f
286 * @opmnemonic EscTwo0f
287 * @openc two0f
288 * @opdisenum OP_2B_ESC
289 * @ophints harmless
290 * @opgroup op_escapes
291 */
292FNIEMOP_DEF(iemOp_2byteEscape)
293{
294#ifdef VBOX_STRICT
295 /* Sanity check the table the first time around. */
296 static bool s_fTested = false;
297 if (RT_LIKELY(s_fTested)) { /* likely */ }
298 else
299 {
300 s_fTested = true;
301 Assert(g_apfnTwoByteMap[0xbc * 4 + 0] == iemOp_bsf_Gv_Ev);
302 Assert(g_apfnTwoByteMap[0xbc * 4 + 1] == iemOp_bsf_Gv_Ev);
303 Assert(g_apfnTwoByteMap[0xbc * 4 + 2] == iemOp_tzcnt_Gv_Ev);
304 Assert(g_apfnTwoByteMap[0xbc * 4 + 3] == iemOp_bsf_Gv_Ev);
305 }
306#endif
307
308 if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_286))
309 {
310 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
311 IEMOP_HLP_MIN_286();
312 return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
313 }
314 /* @opdone */
315
316 /*
317 * On the 8086 this is a POP CS instruction.
318 * For the time being we don't specify this this.
319 */
320 IEMOP_MNEMONIC1(FIXED, POP, pop, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_INVALID_64, IEMOPHINT_SKIP_PYTHON);
321 IEMOP_HLP_NO_64BIT();
322 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
323 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_ES, pVCpu->iem.s.enmEffOpSize);
324}
325
326/**
327 * @opcode 0x10
328 * @opgroup op_gen_arith_bin
329 * @opfltest cf
330 * @opflmodify of,sf,zf,af,pf,cf
331 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,nv,pl,nz,na,pe
332 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,nv,pl,nz,na,po
333 */
FNIEMOP_DEF(iemOp_adc_Eb_Gb)
{
    /* ADC r/m8, r8 (0x10): add with carry-in; shares the binary-op workers. */
    IEMOP_MNEMONIC2(MR, ADC, adc, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_adc);
}
339
340
341/**
342 * @opcode 0x11
343 * @opgroup op_gen_arith_bin
344 * @opfltest cf
345 * @opflmodify of,sf,zf,af,pf,cf
346 */
FNIEMOP_DEF(iemOp_adc_Ev_Gv)
{
    /* ADC r/m16/32/64, r16/32/64 (0x11). */
    IEMOP_MNEMONIC2(MR, ADC, adc, Ev, Gv, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_adc);
}
352
353
354/**
355 * @opcode 0x12
356 * @opgroup op_gen_arith_bin
357 * @opfltest cf
358 * @opflmodify of,sf,zf,af,pf,cf
359 */
FNIEMOP_DEF(iemOp_adc_Gb_Eb)
{
    /* ADC r8, r/m8 (0x12): register destination variant of 0x10. */
    IEMOP_MNEMONIC2(RM, ADC, adc, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_adc);
}
365
366
367/**
368 * @opcode 0x13
369 * @opgroup op_gen_arith_bin
370 * @opfltest cf
371 * @opflmodify of,sf,zf,af,pf,cf
372 */
FNIEMOP_DEF(iemOp_adc_Gv_Ev)
{
    /* ADC r16/32/64, r/m16/32/64 (0x13): register destination variant of 0x11. */
    IEMOP_MNEMONIC2(RM, ADC, adc, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_adc);
}
378
379
380/**
381 * @opcode 0x14
382 * @opgroup op_gen_arith_bin
383 * @opfltest cf
384 * @opflmodify of,sf,zf,af,pf,cf
385 */
FNIEMOP_DEF(iemOp_adc_Al_Ib)
{
    /* ADC AL, imm8 (0x14). */
    IEMOP_MNEMONIC2(FIXED, ADC, adc, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_adc);
}
391
392
393/**
394 * @opcode 0x15
395 * @opgroup op_gen_arith_bin
396 * @opfltest cf
397 * @opflmodify of,sf,zf,af,pf,cf
398 */
FNIEMOP_DEF(iemOp_adc_eAX_Iz)
{
    /* ADC rAX, immz (0x15). */
    IEMOP_MNEMONIC2(FIXED, ADC, adc, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_adc);
}
404
405
406/**
407 * @opcode 0x16
408 */
FNIEMOP_DEF(iemOp_push_SS)
{
    /* PUSH SS (0x16): invalid in 64-bit mode. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, SS, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_SS);
}
415
416
417/**
418 * @opcode 0x17
419 * @opgroup op_gen_arith_bin
420 * @opfltest cf
421 * @opflmodify of,sf,zf,af,pf,cf
422 */
FNIEMOP_DEF(iemOp_pop_SS)
{
    /* POP SS (0x17): invalid in 64-bit mode; marked as inhibiting interrupts for
       the following instruction (DISOPTYPE_INHIBIT_IRQS). */
    IEMOP_MNEMONIC1(FIXED, POP, pop, SS, DISOPTYPE_HARMLESS | DISOPTYPE_INHIBIT_IRQS | DISOPTYPE_INVALID_64 | DISOPTYPE_RRM_DANGEROUS , 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_SS, pVCpu->iem.s.enmEffOpSize);
}
430
431
432/**
433 * @opcode 0x18
434 * @opgroup op_gen_arith_bin
435 * @opfltest cf
436 * @opflmodify of,sf,zf,af,pf,cf
437 */
FNIEMOP_DEF(iemOp_sbb_Eb_Gb)
{
    /* SBB r/m8, r8 (0x18): subtract with borrow-in. */
    IEMOP_MNEMONIC2(MR, SBB, sbb, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sbb);
}
443
444
445/**
446 * @opcode 0x19
447 * @opgroup op_gen_arith_bin
448 * @opfltest cf
449 * @opflmodify of,sf,zf,af,pf,cf
450 */
FNIEMOP_DEF(iemOp_sbb_Ev_Gv)
{
    /* SBB r/m16/32/64, r16/32/64 (0x19). */
    IEMOP_MNEMONIC2(MR, SBB, sbb, Ev, Gv, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sbb);
}
456
457
458/**
459 * @opcode 0x1a
460 * @opgroup op_gen_arith_bin
461 * @opfltest cf
462 * @opflmodify of,sf,zf,af,pf,cf
463 */
FNIEMOP_DEF(iemOp_sbb_Gb_Eb)
{
    /* SBB r8, r/m8 (0x1a): register destination variant of 0x18. */
    IEMOP_MNEMONIC2(RM, SBB, sbb, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sbb);
}
469
470
471/**
472 * @opcode 0x1b
473 * @opgroup op_gen_arith_bin
474 * @opfltest cf
475 * @opflmodify of,sf,zf,af,pf,cf
476 */
FNIEMOP_DEF(iemOp_sbb_Gv_Ev)
{
    /* SBB r16/32/64, r/m16/32/64 (0x1b): register destination variant of 0x19. */
    IEMOP_MNEMONIC2(RM, SBB, sbb, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sbb);
}
482
483
484/**
485 * @opcode 0x1c
486 * @opgroup op_gen_arith_bin
487 * @opfltest cf
488 * @opflmodify of,sf,zf,af,pf,cf
489 */
FNIEMOP_DEF(iemOp_sbb_Al_Ib)
{
    /* SBB AL, imm8 (0x1c). */
    IEMOP_MNEMONIC2(FIXED, SBB, sbb, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sbb);
}
495
496
497/**
498 * @opcode 0x1d
499 * @opgroup op_gen_arith_bin
500 * @opfltest cf
501 * @opflmodify of,sf,zf,af,pf,cf
502 */
FNIEMOP_DEF(iemOp_sbb_eAX_Iz)
{
    /* SBB rAX, immz (0x1d). */
    IEMOP_MNEMONIC2(FIXED, SBB, sbb, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sbb);
}
508
509
510/**
511 * @opcode 0x1e
512 * @opgroup op_stack_sreg
513 */
FNIEMOP_DEF(iemOp_push_DS)
{
    /* PUSH DS (0x1e): invalid in 64-bit mode. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, DS, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_DS);
}
520
521
522/**
523 * @opcode 0x1f
524 * @opgroup op_stack_sreg
525 */
FNIEMOP_DEF(iemOp_pop_DS)
{
    /* POP DS (0x1f): invalid in 64-bit mode; deferred to the C implementation. */
    IEMOP_MNEMONIC1(FIXED, POP, pop, DS, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_DS, pVCpu->iem.s.enmEffOpSize);
}
533
534
535/**
536 * @opcode 0x20
537 * @opgroup op_gen_arith_bin
538 * @opflmodify of,sf,zf,af,pf,cf
539 * @opflundef af
540 * @opflclear of,cf
541 */
FNIEMOP_DEF(iemOp_and_Eb_Gb)
{
    /* AND r/m8, r8 (0x20): AF undefined after AND, hence the verification exclusion. */
    IEMOP_MNEMONIC2(MR, AND, and, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_and);
}
548
549
550/**
551 * @opcode 0x21
552 * @opgroup op_gen_arith_bin
553 * @opflmodify of,sf,zf,af,pf,cf
554 * @opflundef af
555 * @opflclear of,cf
556 */
FNIEMOP_DEF(iemOp_and_Ev_Gv)
{
    /* AND r/m16/32/64, r16/32/64 (0x21). */
    IEMOP_MNEMONIC2(MR, AND, and, Ev, Gv, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_and);
}
563
564
565/**
566 * @opcode 0x22
567 * @opgroup op_gen_arith_bin
568 * @opflmodify of,sf,zf,af,pf,cf
569 * @opflundef af
570 * @opflclear of,cf
571 */
FNIEMOP_DEF(iemOp_and_Gb_Eb)
{
    /* AND r8, r/m8 (0x22): register destination variant of 0x20. */
    IEMOP_MNEMONIC2(RM, AND, and, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_and);
}
578
579
580/**
581 * @opcode 0x23
582 * @opgroup op_gen_arith_bin
583 * @opflmodify of,sf,zf,af,pf,cf
584 * @opflundef af
585 * @opflclear of,cf
586 */
FNIEMOP_DEF(iemOp_and_Gv_Ev)
{
    /* AND r16/32/64, r/m16/32/64 (0x23): register destination variant of 0x21. */
    IEMOP_MNEMONIC2(RM, AND, and, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_and);
}
593
594
595/**
596 * @opcode 0x24
597 * @opgroup op_gen_arith_bin
598 * @opflmodify of,sf,zf,af,pf,cf
599 * @opflundef af
600 * @opflclear of,cf
601 */
602FNIEMOP_DEF(iemOp_and_Al_Ib)
603{
604 IEMOP_MNEMONIC2(FIXED, AND, and, AL, Ib, DISOPTYPE_HARMLESS, 0);
605 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
606 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_and);
607}
608
609
610/**
611 * @opcode 0x25
612 * @opgroup op_gen_arith_bin
613 * @opflmodify of,sf,zf,af,pf,cf
614 * @opflundef af
615 * @opflclear of,cf
616 */
FNIEMOP_DEF(iemOp_and_eAX_Iz)
{
    /* AND rAX, immz (0x25). */
    IEMOP_MNEMONIC2(FIXED, AND, and, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_and);
}
623
624
625/**
626 * @opcode 0x26
627 * @opmnemonic SEG
628 * @op1 ES
629 * @opgroup op_prefix
630 * @openc prefix
631 * @opdisenum OP_SEG
632 * @ophints harmless
633 */
FNIEMOP_DEF(iemOp_seg_ES)
{
    /* ES segment-override prefix (0x26): record the prefix and effective segment,
       then decode the next byte as the real opcode.  Note: a prefix cancels any
       REX prefix seen so far (REX must immediately precede the opcode). */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg es");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_ES;
    pVCpu->iem.s.iEffSeg = X86_SREG_ES;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
643
644
645/**
646 * @opcode 0x27
647 * @opfltest af,cf
648 * @opflmodify of,sf,zf,af,pf,cf
649 * @opflundef of
650 */
FNIEMOP_DEF(iemOp_daa)
{
    /* DAA (0x27): decimal adjust AL after addition; invalid in 64-bit mode.
       OF is architecturally undefined afterwards. */
    IEMOP_MNEMONIC0(FIXED, DAA, daa, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0); /* express implicit AL register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_daa);
}
659
660
661/**
662 * @opcode 0x28
663 * @opgroup op_gen_arith_bin
664 * @opflmodify of,sf,zf,af,pf,cf
665 */
FNIEMOP_DEF(iemOp_sub_Eb_Gb)
{
    /* SUB r/m8, r8 (0x28). */
    IEMOP_MNEMONIC2(MR, SUB, sub, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sub);
}
671
672
673/**
674 * @opcode 0x29
675 * @opgroup op_gen_arith_bin
676 * @opflmodify of,sf,zf,af,pf,cf
677 */
FNIEMOP_DEF(iemOp_sub_Ev_Gv)
{
    /* SUB r/m16/32/64, r16/32/64 (0x29). */
    IEMOP_MNEMONIC2(MR, SUB, sub, Ev, Gv, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sub);
}
683
684
685/**
686 * @opcode 0x2a
687 * @opgroup op_gen_arith_bin
688 * @opflmodify of,sf,zf,af,pf,cf
689 */
FNIEMOP_DEF(iemOp_sub_Gb_Eb)
{
    /* SUB r8, r/m8 (0x2a): register destination variant of 0x28. */
    IEMOP_MNEMONIC2(RM, SUB, sub, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sub);
}
695
696
697/**
698 * @opcode 0x2b
699 * @opgroup op_gen_arith_bin
700 * @opflmodify of,sf,zf,af,pf,cf
701 */
FNIEMOP_DEF(iemOp_sub_Gv_Ev)
{
    /* SUB r16/32/64, r/m16/32/64 (0x2b): register destination variant of 0x29. */
    IEMOP_MNEMONIC2(RM, SUB, sub, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sub);
}
707
708
709/**
710 * @opcode 0x2c
711 * @opgroup op_gen_arith_bin
712 * @opflmodify of,sf,zf,af,pf,cf
713 */
FNIEMOP_DEF(iemOp_sub_Al_Ib)
{
    /* SUB AL, imm8 (0x2c). */
    IEMOP_MNEMONIC2(FIXED, SUB, sub, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sub);
}
719
720
721/**
722 * @opcode 0x2d
723 * @opgroup op_gen_arith_bin
724 * @opflmodify of,sf,zf,af,pf,cf
725 */
FNIEMOP_DEF(iemOp_sub_eAX_Iz)
{
    /* SUB rAX, immz (0x2d). */
    IEMOP_MNEMONIC2(FIXED, SUB, sub, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sub);
}
731
732
733/**
734 * @opcode 0x2e
735 * @opmnemonic SEG
736 * @op1 CS
737 * @opgroup op_prefix
738 * @openc prefix
739 * @opdisenum OP_SEG
740 * @ophints harmless
741 */
FNIEMOP_DEF(iemOp_seg_CS)
{
    /* CS segment-override prefix (0x2e): record it, then decode the next byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg cs");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_CS;
    pVCpu->iem.s.iEffSeg = X86_SREG_CS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
751
752
753/**
754 * @opcode 0x2f
755 * @opfltest af,cf
756 * @opflmodify of,sf,zf,af,pf,cf
757 * @opflundef of
758 */
FNIEMOP_DEF(iemOp_das)
{
    /* DAS (0x2f): decimal adjust AL after subtraction; invalid in 64-bit mode.
       OF is architecturally undefined afterwards. */
    IEMOP_MNEMONIC0(FIXED, DAS, das, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0); /* express implicit AL register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_das);
}
767
768
769/**
770 * @opcode 0x30
771 * @opgroup op_gen_arith_bin
772 * @opflmodify of,sf,zf,af,pf,cf
773 * @opflundef af
774 * @opflclear of,cf
775 */
FNIEMOP_DEF(iemOp_xor_Eb_Gb)
{
    /* XOR r/m8, r8 (0x30): AF undefined after XOR. */
    IEMOP_MNEMONIC2(MR, XOR, xor, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_xor);
}
782
783
784/**
785 * @opcode 0x31
786 * @opgroup op_gen_arith_bin
787 * @opflmodify of,sf,zf,af,pf,cf
788 * @opflundef af
789 * @opflclear of,cf
790 */
FNIEMOP_DEF(iemOp_xor_Ev_Gv)
{
    /* XOR r/m16/32/64, r16/32/64 (0x31). */
    IEMOP_MNEMONIC2(MR, XOR, xor, Ev, Gv, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_xor);
}
797
798
799/**
800 * @opcode 0x32
801 * @opgroup op_gen_arith_bin
802 * @opflmodify of,sf,zf,af,pf,cf
803 * @opflundef af
804 * @opflclear of,cf
805 */
FNIEMOP_DEF(iemOp_xor_Gb_Eb)
{
    /* XOR r8, r/m8 (0x32): register destination variant of 0x30. */
    IEMOP_MNEMONIC2(RM, XOR, xor, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_xor);
}
812
813
814/**
815 * @opcode 0x33
816 * @opgroup op_gen_arith_bin
817 * @opflmodify of,sf,zf,af,pf,cf
818 * @opflundef af
819 * @opflclear of,cf
820 */
FNIEMOP_DEF(iemOp_xor_Gv_Ev)
{
    /* XOR r16/32/64, r/m16/32/64 (0x33): register destination variant of 0x31. */
    IEMOP_MNEMONIC2(RM, XOR, xor, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_xor);
}
827
828
829/**
830 * @opcode 0x34
831 * @opgroup op_gen_arith_bin
832 * @opflmodify of,sf,zf,af,pf,cf
833 * @opflundef af
834 * @opflclear of,cf
835 */
FNIEMOP_DEF(iemOp_xor_Al_Ib)
{
    /* XOR AL, imm8 (0x34). */
    IEMOP_MNEMONIC2(FIXED, XOR, xor, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_xor);
}
842
843
844/**
845 * @opcode 0x35
846 * @opgroup op_gen_arith_bin
847 * @opflmodify of,sf,zf,af,pf,cf
848 * @opflundef af
849 * @opflclear of,cf
850 */
851FNIEMOP_DEF(iemOp_xor_eAX_Iz)
852{
853 IEMOP_MNEMONIC2(FIXED, XOR, xor, rAX, Iz, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
854 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
855 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_xor);
856}
857
858
859/**
860 * @opcode 0x36
861 */
FNIEMOP_DEF(iemOp_seg_SS)
{
    /* SS segment-override prefix (0x36): record it, then decode the next byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ss");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_SS;
    pVCpu->iem.s.iEffSeg = X86_SREG_SS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
871
872
873/**
874 * @opcode 0x37
875 */
FNIEMOP_STUB(iemOp_aaa); /* AAA (0x37) - stub, implementation pending. */
877
878
879/**
880 * @opcode 0x38
881 */
FNIEMOP_DEF(iemOp_cmp_Eb_Gb)
{
    /* CMP r/m8, r8 (0x38): SUB flags semantics, result discarded. */
    IEMOP_MNEMONIC(cmp_Eb_Gb, "cmp Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_cmp);
}
887
888
889/**
890 * @opcode 0x39
891 */
FNIEMOP_DEF(iemOp_cmp_Ev_Gv)
{
    /* CMP r/m16/32/64, r16/32/64 (0x39). */
    IEMOP_MNEMONIC(cmp_Ev_Gv, "cmp Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_cmp);
}
897
898
899/**
900 * @opcode 0x3a
901 */
FNIEMOP_DEF(iemOp_cmp_Gb_Eb)
{
    /* CMP r8, r/m8 (0x3a). */
    IEMOP_MNEMONIC(cmp_Gb_Eb, "cmp Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_cmp);
}
907
908
909/**
910 * @opcode 0x3b
911 */
FNIEMOP_DEF(iemOp_cmp_Gv_Ev)
{
    /* CMP r16/32/64, r/m16/32/64 (0x3b). */
    IEMOP_MNEMONIC(cmp_Gv_Ev, "cmp Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_cmp);
}
917
918
919/**
920 * @opcode 0x3c
921 */
FNIEMOP_DEF(iemOp_cmp_Al_Ib)
{
    /* CMP AL, imm8 (0x3c). */
    IEMOP_MNEMONIC(cmp_al_Ib, "cmp al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_cmp);
}
927
928
929/**
930 * @opcode 0x3d
931 */
FNIEMOP_DEF(iemOp_cmp_eAX_Iz)
{
    /* CMP rAX, immz (0x3d). */
    IEMOP_MNEMONIC(cmp_rAX_Iz, "cmp rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_cmp);
}
937
938
939/**
940 * @opcode 0x3e
941 */
FNIEMOP_DEF(iemOp_seg_DS)
{
    /* DS segment-override prefix (0x3e): record it, then decode the next byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ds");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_DS;
    pVCpu->iem.s.iEffSeg = X86_SREG_DS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
951
952
953/**
954 * @opcode 0x3f
955 */
FNIEMOP_STUB(iemOp_aas); /* AAS (0x3f) - stub, implementation pending. */
957
958/**
959 * Common 'inc/dec/not/neg register' helper.
960 */
FNIEMOP_DEF_2(iemOpCommonUnaryGReg, PCIEMOPUNARYSIZES, pImpl, uint8_t, iReg)
{
    /* Common worker for unary ops (inc/dec/not/neg) on a general register:
       emits an MC block that references the register of the current effective
       operand size plus EFLAGS and calls the size-matched assembly worker. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint16_t *,  pu16Dst, 0);
            IEM_MC_ARG(uint32_t *,  pEFlags, 1);
            IEM_MC_REF_GREG_U16(pu16Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint32_t *,  pu32Dst, 0);
            IEM_MC_ARG(uint32_t *,  pEFlags, 1);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
            /* 32-bit writes zero the high half of the 64-bit register. */
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint64_t *,  pu64Dst, 0);
            IEM_MC_ARG(uint32_t *,  pEFlags, 1);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
    }
    /* Not reached: enmEffOpSize is always one of the three cases above. */
    return VINF_SUCCESS;
}
1002
1003
1004/**
1005 * @opcode 0x40
1006 */
FNIEMOP_DEF(iemOp_inc_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        /* 0x40 = REX (no W/R/X/B bits): record it and decode the next byte. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Otherwise INC eAX via the common unary-op worker. */
    IEMOP_MNEMONIC(inc_eAX, "inc eAX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xAX);
}
1024
1025
1026/**
1027 * @opcode 0x41
1028 */
FNIEMOP_DEF(iemOp_inc_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        /* 0x41 = REX.B: bit 3 extension of the r/m / base / opcode register. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.b");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexB = 1 << 3; /* stored pre-shifted, presumably for OR-ing into register indexes - verify against decoder use */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eCX, "inc eCX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xCX);
}
1047
1048
1049/**
1050 * @opcode 0x42
1051 */
FNIEMOP_DEF(iemOp_inc_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        /* 0x42 = REX.X: bit 3 extension of the SIB index register. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.x");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eDX, "inc eDX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDX);
}
1070
1071
1072
1073/**
1074 * @opcode 0x43
1075 */
FNIEMOP_DEF(iemOp_inc_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        /* 0x43 = REX.XB: both base/rm and SIB index extensions. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexB     = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eBX, "inc eBX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBX);
}
1095
1096
1097/**
1098 * @opcode 0x44
1099 */
FNIEMOP_DEF(iemOp_inc_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        /* 0x44 = REX.R: bit 3 extension of the ModR/M reg field. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.r");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R;
        pVCpu->iem.s.uRexReg = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eSP, "inc eSP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSP);
}
1118
1119
1120/**
1121 * @opcode 0x45
1122 */
FNIEMOP_DEF(iemOp_inc_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        /* 0x45 = REX.RB. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rb");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexB   = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eBP, "inc eBP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBP);
}
1142
1143
1144/**
1145 * @opcode 0x46
1146 */
FNIEMOP_DEF(iemOp_inc_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        /* 0x46 = REX.RX. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eSI, "inc eSI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSI);
}
1166
1167
1168/**
1169 * @opcode 0x47
1170 */
1171FNIEMOP_DEF(iemOp_inc_eDI)
1172{
1173 /*
1174 * This is a REX prefix in 64-bit mode.
1175 */
1176 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1177 {
1178 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbx");
1179 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
1180 pVCpu->iem.s.uRexReg = 1 << 3;
1181 pVCpu->iem.s.uRexB = 1 << 3;
1182 pVCpu->iem.s.uRexIndex = 1 << 3;
1183
1184 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1185 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1186 }
1187
1188 IEMOP_MNEMONIC(inc_eDI, "inc eDI");
1189 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDI);
1190}
1191
1192
1193/**
1194 * @opcode 0x48
1195 */
1196FNIEMOP_DEF(iemOp_dec_eAX)
1197{
1198 /*
1199 * This is a REX prefix in 64-bit mode.
1200 */
1201 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1202 {
1203 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.w");
1204 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_SIZE_REX_W;
1205 iemRecalEffOpSize(pVCpu);
1206
1207 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1208 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1209 }
1210
1211 IEMOP_MNEMONIC(dec_eAX, "dec eAX");
1212 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xAX);
1213}
1214
1215
1216/**
1217 * @opcode 0x49
1218 */
1219FNIEMOP_DEF(iemOp_dec_eCX)
1220{
1221 /*
1222 * This is a REX prefix in 64-bit mode.
1223 */
1224 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1225 {
1226 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bw");
1227 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
1228 pVCpu->iem.s.uRexB = 1 << 3;
1229 iemRecalEffOpSize(pVCpu);
1230
1231 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1232 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1233 }
1234
1235 IEMOP_MNEMONIC(dec_eCX, "dec eCX");
1236 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xCX);
1237}
1238
1239
1240/**
1241 * @opcode 0x4a
1242 */
1243FNIEMOP_DEF(iemOp_dec_eDX)
1244{
1245 /*
1246 * This is a REX prefix in 64-bit mode.
1247 */
1248 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1249 {
1250 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.xw");
1251 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
1252 pVCpu->iem.s.uRexIndex = 1 << 3;
1253 iemRecalEffOpSize(pVCpu);
1254
1255 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1256 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1257 }
1258
1259 IEMOP_MNEMONIC(dec_eDX, "dec eDX");
1260 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDX);
1261}
1262
1263
1264/**
1265 * @opcode 0x4b
1266 */
1267FNIEMOP_DEF(iemOp_dec_eBX)
1268{
1269 /*
1270 * This is a REX prefix in 64-bit mode.
1271 */
1272 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1273 {
1274 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bxw");
1275 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
1276 pVCpu->iem.s.uRexB = 1 << 3;
1277 pVCpu->iem.s.uRexIndex = 1 << 3;
1278 iemRecalEffOpSize(pVCpu);
1279
1280 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1281 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1282 }
1283
1284 IEMOP_MNEMONIC(dec_eBX, "dec eBX");
1285 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBX);
1286}
1287
1288
1289/**
1290 * @opcode 0x4c
1291 */
1292FNIEMOP_DEF(iemOp_dec_eSP)
1293{
1294 /*
1295 * This is a REX prefix in 64-bit mode.
1296 */
1297 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1298 {
1299 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rw");
1300 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_SIZE_REX_W;
1301 pVCpu->iem.s.uRexReg = 1 << 3;
1302 iemRecalEffOpSize(pVCpu);
1303
1304 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1305 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1306 }
1307
1308 IEMOP_MNEMONIC(dec_eSP, "dec eSP");
1309 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSP);
1310}
1311
1312
1313/**
1314 * @opcode 0x4d
1315 */
1316FNIEMOP_DEF(iemOp_dec_eBP)
1317{
1318 /*
1319 * This is a REX prefix in 64-bit mode.
1320 */
1321 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1322 {
1323 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbw");
1324 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
1325 pVCpu->iem.s.uRexReg = 1 << 3;
1326 pVCpu->iem.s.uRexB = 1 << 3;
1327 iemRecalEffOpSize(pVCpu);
1328
1329 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1330 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1331 }
1332
1333 IEMOP_MNEMONIC(dec_eBP, "dec eBP");
1334 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBP);
1335}
1336
1337
1338/**
1339 * @opcode 0x4e
1340 */
1341FNIEMOP_DEF(iemOp_dec_eSI)
1342{
1343 /*
1344 * This is a REX prefix in 64-bit mode.
1345 */
1346 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1347 {
1348 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rxw");
1349 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
1350 pVCpu->iem.s.uRexReg = 1 << 3;
1351 pVCpu->iem.s.uRexIndex = 1 << 3;
1352 iemRecalEffOpSize(pVCpu);
1353
1354 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1355 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1356 }
1357
1358 IEMOP_MNEMONIC(dec_eSI, "dec eSI");
1359 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSI);
1360}
1361
1362
1363/**
1364 * @opcode 0x4f
1365 */
1366FNIEMOP_DEF(iemOp_dec_eDI)
1367{
1368 /*
1369 * This is a REX prefix in 64-bit mode.
1370 */
1371 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1372 {
1373 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbxw");
1374 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
1375 pVCpu->iem.s.uRexReg = 1 << 3;
1376 pVCpu->iem.s.uRexB = 1 << 3;
1377 pVCpu->iem.s.uRexIndex = 1 << 3;
1378 iemRecalEffOpSize(pVCpu);
1379
1380 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1381 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1382 }
1383
1384 IEMOP_MNEMONIC(dec_eDI, "dec eDI");
1385 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDI);
1386}
1387
1388
/**
 * Common 'push register' helper.
 *
 * Pushes the given general purpose register using the current effective
 * operand size.  In 64-bit mode the register index is extended with REX.B
 * and the default operand size is forced to 64-bit; a 66h prefix selects
 * 16-bit instead (there is no 32-bit push in long mode).
 *
 * @param   iReg    The register index (X86_GREG_XXX), before REX.B extension.
 */
FNIEMOP_DEF_1(iemOpCommonPushGReg, uint8_t, iReg)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        iReg |= pVCpu->iem.s.uRexB;
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Value);
            IEM_MC_FETCH_GREG_U16(u16Value, iReg);
            IEM_MC_PUSH_U16(u16Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U32(u32Value, iReg);
            IEM_MC_PUSH_U32(u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U64(u64Value, iReg);
            IEM_MC_PUSH_U64(u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
1434
1435
/**
 * @opcode 0x50
 */
FNIEMOP_DEF(iemOp_push_eAX)
{
    /* Push AX/EAX/RAX via the common helper (handles REX.B and op-size). */
    IEMOP_MNEMONIC(push_rAX, "push rAX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xAX);
}
1444
1445
/**
 * @opcode 0x51
 */
FNIEMOP_DEF(iemOp_push_eCX)
{
    /* Push CX/ECX/RCX via the common helper (handles REX.B and op-size). */
    IEMOP_MNEMONIC(push_rCX, "push rCX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xCX);
}
1454
1455
/**
 * @opcode 0x52
 */
FNIEMOP_DEF(iemOp_push_eDX)
{
    /* Push DX/EDX/RDX via the common helper (handles REX.B and op-size). */
    IEMOP_MNEMONIC(push_rDX, "push rDX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDX);
}
1464
1465
/**
 * @opcode 0x53
 */
FNIEMOP_DEF(iemOp_push_eBX)
{
    /* Push BX/EBX/RBX via the common helper (handles REX.B and op-size). */
    IEMOP_MNEMONIC(push_rBX, "push rBX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBX);
}
1474
1475
1476/**
1477 * @opcode 0x54
1478 */
1479FNIEMOP_DEF(iemOp_push_eSP)
1480{
1481 IEMOP_MNEMONIC(push_rSP, "push rSP");
1482 if (IEM_GET_TARGET_CPU(pVCpu) == IEMTARGETCPU_8086)
1483 {
1484 IEM_MC_BEGIN(0, 1);
1485 IEM_MC_LOCAL(uint16_t, u16Value);
1486 IEM_MC_FETCH_GREG_U16(u16Value, X86_GREG_xSP);
1487 IEM_MC_SUB_LOCAL_U16(u16Value, 2);
1488 IEM_MC_PUSH_U16(u16Value);
1489 IEM_MC_ADVANCE_RIP();
1490 IEM_MC_END();
1491 }
1492 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSP);
1493}
1494
1495
/**
 * @opcode 0x55
 */
FNIEMOP_DEF(iemOp_push_eBP)
{
    /* Push BP/EBP/RBP via the common helper (handles REX.B and op-size). */
    IEMOP_MNEMONIC(push_rBP, "push rBP");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBP);
}
1504
1505
/**
 * @opcode 0x56
 */
FNIEMOP_DEF(iemOp_push_eSI)
{
    /* Push SI/ESI/RSI via the common helper (handles REX.B and op-size). */
    IEMOP_MNEMONIC(push_rSI, "push rSI");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSI);
}
1514
1515
/**
 * @opcode 0x57
 */
FNIEMOP_DEF(iemOp_push_eDI)
{
    /* Push DI/EDI/RDI via the common helper (handles REX.B and op-size). */
    IEMOP_MNEMONIC(push_rDI, "push rDI");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDI);
}
1524
1525
/**
 * Common 'pop register' helper.
 *
 * Pops the top of stack into the given general purpose register using the
 * current effective operand size.  In 64-bit mode the register index is
 * extended with REX.B and the default operand size is forced to 64-bit;
 * a 66h prefix selects 16-bit instead.
 *
 * @param   iReg    The register index (X86_GREG_XXX), before REX.B extension.
 */
FNIEMOP_DEF_1(iemOpCommonPopGReg, uint8_t, iReg)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        iReg |= pVCpu->iem.s.uRexB;
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t *, pu16Dst);
            IEM_MC_REF_GREG_U16(pu16Dst, iReg);
            IEM_MC_POP_U16(pu16Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t *, pu32Dst);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            IEM_MC_POP_U32(pu32Dst);
            /* Writing a 32-bit GPR zeroes the high half of the 64-bit register. */
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /** @todo testcase*/
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t *, pu64Dst);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_POP_U64(pu64Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
1572
1573
/**
 * @opcode 0x58
 */
FNIEMOP_DEF(iemOp_pop_eAX)
{
    /* Pop into AX/EAX/RAX via the common helper (handles REX.B and op-size). */
    IEMOP_MNEMONIC(pop_rAX, "pop rAX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xAX);
}
1582
1583
/**
 * @opcode 0x59
 */
FNIEMOP_DEF(iemOp_pop_eCX)
{
    /* Pop into CX/ECX/RCX via the common helper (handles REX.B and op-size). */
    IEMOP_MNEMONIC(pop_rCX, "pop rCX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xCX);
}
1592
1593
/**
 * @opcode 0x5a
 */
FNIEMOP_DEF(iemOp_pop_eDX)
{
    /* Pop into DX/EDX/RDX via the common helper (handles REX.B and op-size). */
    IEMOP_MNEMONIC(pop_rDX, "pop rDX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDX);
}
1602
1603
/**
 * @opcode 0x5b
 */
FNIEMOP_DEF(iemOp_pop_eBX)
{
    /* Pop into BX/EBX/RBX via the common helper (handles REX.B and op-size). */
    IEMOP_MNEMONIC(pop_rBX, "pop rBX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBX);
}
1612
1613
/**
 * @opcode 0x5c
 *
 * POP rSP needs special treatment: the destination register is the stack
 * pointer itself, so the popped value is read into a local first and then
 * stored, rather than popping through a register reference like the other
 * pop instructions (see iemOpCommonPopGReg).
 */
FNIEMOP_DEF(iemOp_pop_eSP)
{
    IEMOP_MNEMONIC(pop_rSP, "pop rSP");
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        /* With REX.B this is 'pop r12', which the common helper handles fine. */
        if (pVCpu->iem.s.uRexB)
            return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSP);
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
                           DISOPTYPE_HARMLESS | DISOPTYPE_DEFAULT_64_OP_SIZE | DISOPTYPE_REXB_EXTENDS_OPREG);
    /** @todo add testcase for this instruction. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Dst);
            IEM_MC_POP_U16(&u16Dst); /** @todo not correct MC, fix later. */
            IEM_MC_STORE_GREG_U16(X86_GREG_xSP, u16Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Dst);
            IEM_MC_POP_U32(&u32Dst);
            IEM_MC_STORE_GREG_U32(X86_GREG_xSP, u32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Dst);
            IEM_MC_POP_U64(&u64Dst);
            IEM_MC_STORE_GREG_U64(X86_GREG_xSP, u64Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
1663
1664
/**
 * @opcode 0x5d
 */
FNIEMOP_DEF(iemOp_pop_eBP)
{
    /* Pop into BP/EBP/RBP via the common helper (handles REX.B and op-size). */
    IEMOP_MNEMONIC(pop_rBP, "pop rBP");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBP);
}
1673
1674
/**
 * @opcode 0x5e
 */
FNIEMOP_DEF(iemOp_pop_eSI)
{
    /* Pop into SI/ESI/RSI via the common helper (handles REX.B and op-size). */
    IEMOP_MNEMONIC(pop_rSI, "pop rSI");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSI);
}
1683
1684
/**
 * @opcode 0x5f
 */
FNIEMOP_DEF(iemOp_pop_eDI)
{
    /* Pop into DI/EDI/RDI via the common helper (handles REX.B and op-size). */
    IEMOP_MNEMONIC(pop_rDI, "pop rDI");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDI);
}
1693
1694
/**
 * @opcode 0x60
 */
FNIEMOP_DEF(iemOp_pusha)
{
    /* Push all general registers; 186+ instruction, invalid in 64-bit mode.
       The real work is deferred to a C implementation per operand size. */
    IEMOP_MNEMONIC(pusha, "pusha");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_NO_64BIT();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_16);
    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_32);
}
1708
1709
/**
 * @opcode 0x61
 */
FNIEMOP_DEF(iemOp_popa__mvex)
{
    /* Outside 64-bit mode this is POPA (186+); in 64-bit mode the byte is
       reused as the (unsupported) MVEX prefix and raises \#UD. */
    if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
    {
        IEMOP_MNEMONIC(popa, "popa");
        IEMOP_HLP_MIN_186();
        IEMOP_HLP_NO_64BIT();
        if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
            return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_16);
        Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_32);
    }
    IEMOP_MNEMONIC(mvex, "mvex");
    Log(("mvex prefix is not supported!\n"));
    return IEMOP_RAISE_INVALID_OPCODE();
}
1729
1730
/**
 * @opcode 0x62
 * @opmnemonic bound
 * @op1 Gv
 * @op2 Ma
 * @opmincpu 80186
 * @ophints harmless invalid_64
 */
/* Unimplemented stub; raises the "not implemented" status when hit.
   NOTE(review): the name suggests 0x62 doubles as the EVEX prefix in
   64-bit mode -- to be handled when this gets implemented. */
FNIEMOP_STUB(iemOp_bound_Gv_Ma__evex);
//  IEMOP_HLP_MIN_186();
1741
1742
/** Opcode 0x63 - non-64-bit modes.
 *
 * ARPL Ew,Gw: adjusts the RPL field of the destination selector; only
 * decodable from 286 protected mode onwards (raises \#UD in real/V86 mode).
 */
FNIEMOP_DEF(iemOp_arpl_Ew_Gw)
{
    IEMOP_MNEMONIC(arpl_Ew_Gw, "arpl Ew,Gw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register */
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint16_t *,      pu16Dst,    0);
        IEM_MC_ARG(uint16_t,        u16Src,     1);
        IEM_MC_ARG(uint32_t *,      pEFlags,    2);

        IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* Memory: the destination operand is read-modify-written in place. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *, pu16Dst,          0);
        IEM_MC_ARG(uint16_t,   u16Src,           1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;

}
1792
1793
/**
 * @opcode 0x63
 *
 * MOVSXD Gv,Ev (64-bit mode): sign-extends a 32-bit source into a 64-bit
 * destination register.
 *
 * @note This is a weird one. It works like a regular move instruction if
 *       REX.W isn't set, at least according to AMD docs (rev 3.15, 2009-11).
 * @todo This definitely needs a testcase to verify the odd cases. */
FNIEMOP_DEF(iemOp_movsxd_Gv_Ev)
{
    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT); /* Caller branched already. */

    IEMOP_MNEMONIC(movsxd_Gv_Ev, "movsxd Gv,Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register to register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, u64Value);
        IEM_MC_FETCH_GREG_U32_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, u64Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U32_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1837
1838
/**
 * @opcode 0x64
 * @opmnemonic segfs
 * @opmincpu 80386
 * @opgroup op_prefixes
 */
FNIEMOP_DEF(iemOp_seg_FS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg fs");
    IEMOP_HLP_MIN_386();

    /* Record the FS segment override and decode the next byte as the opcode. */
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_FS;
    pVCpu->iem.s.iEffSeg    = X86_SREG_FS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1856
1857
/**
 * @opcode 0x65
 * @opmnemonic seggs
 * @opmincpu 80386
 * @opgroup op_prefixes
 */
FNIEMOP_DEF(iemOp_seg_GS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg gs");
    IEMOP_HLP_MIN_386();

    /* Record the GS segment override and decode the next byte as the opcode. */
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_GS;
    pVCpu->iem.s.iEffSeg    = X86_SREG_GS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1875
1876
/**
 * @opcode 0x66
 * @opmnemonic opsize
 * @openc prefix
 * @opmincpu 80386
 * @ophints harmless
 * @opgroup op_prefixes
 */
FNIEMOP_DEF(iemOp_op_size)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("op size");
    IEMOP_HLP_MIN_386();

    /* Flag the prefix and recalculate the effective operand size. */
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_OP;
    iemRecalEffOpSize(pVCpu);

    /* For the 4 entry opcode tables, the operand prefix doesn't count
       when REPZ or REPNZ are present. */
    if (pVCpu->iem.s.idxPrefix == 0)
        pVCpu->iem.s.idxPrefix = 1;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1901
1902
/**
 * @opcode 0x67
 * @opmnemonic addrsize
 * @openc prefix
 * @opmincpu 80386
 * @ophints harmless
 * @opgroup op_prefixes
 */
FNIEMOP_DEF(iemOp_addr_size)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("addr size");
    IEMOP_HLP_MIN_386();

    /* Toggle the effective addressing mode relative to the default one
       (16 <-> 32; in 64-bit mode the prefix selects 32-bit addressing). */
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_ADDR;
    switch (pVCpu->iem.s.enmDefAddrMode)
    {
        case IEMMODE_16BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
        case IEMMODE_32BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_16BIT; break;
        case IEMMODE_64BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
        default: AssertFailed();
    }

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1928
1929
/**
 * @opcode 0x68
 */
FNIEMOP_DEF(iemOp_push_Iz)
{
    /* Push an immediate of the effective operand size; 186+ instruction.
       In 64-bit mode the immediate is 32-bit and sign-extended to 64 bits. */
    IEMOP_MNEMONIC(push_Iz, "push Iz");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U16(u16Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U32(u32Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U64(u64Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
1976
1977
/**
 * @opcode 0x69
 *
 * Three-operand IMUL with a full-size immediate.  The product is computed
 * into a local and then stored to the Gv register; for memory operands the
 * immediate is fetched after the ModR/M bytes, so the byte count of the
 * trailing immediate is passed to IEM_MC_CALC_RM_EFF_ADDR (2 or 4 below).
 */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Iz)
{
    IEMOP_MNEMONIC(imul_Gv_Ev_Iz, "imul Gv,Ev,Iz"); /* Gv = Ev * Iz; */
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *,      pu16Dst,            0);
                IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/ u16Imm,1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_LOCAL(uint16_t,      u16Tmp);

                IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,      pu16Dst,            0);
                IEM_MC_ARG(uint16_t,        u16Src,             1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_LOCAL(uint16_t,      u16Tmp);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); /* 2 = imm16 follows */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *,      pu32Dst,            0);
                IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/ u32Imm,1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_LOCAL(uint32_t,      u32Tmp);

                IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,      pu32Dst,            0);
                IEM_MC_ARG(uint32_t,        u32Src,             1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_LOCAL(uint32_t,      u32Tmp);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* 4 = imm32 follows */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand; imm32 sign-extended to 64 bits */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *,      pu64Dst,            0);
                IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/ u64Imm,1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_LOCAL(uint64_t,      u64Tmp);

                IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,      pu64Dst,            0);
                IEM_MC_ARG(uint64_t,        u64Src,             1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_LOCAL(uint64_t,      u64Tmp);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* 4 = imm32 follows */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }
    }
    AssertFailedReturn(VERR_IEM_IPE_9);
}
2139
2140
/**
 * @opcode 0x6a
 */
FNIEMOP_DEF(iemOp_push_Ib)
{
    /* Push a sign-extended byte immediate; 186+ instruction.  i8Imm is
       signed, so the implicit conversion in the push macros below
       sign-extends it to the effective push width. */
    IEMOP_MNEMONIC(push_Ib, "push Ib");
    IEMOP_HLP_MIN_186();
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0,0);
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_PUSH_U16(i8Imm);
            break;
        case IEMMODE_32BIT:
            IEM_MC_PUSH_U32(i8Imm);
            break;
        case IEMMODE_64BIT:
            IEM_MC_PUSH_U64(i8Imm);
            break;
    }
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2169
2170
/**
 * @opcode 0x6b
 *
 * Three-operand IMUL with a sign-extended byte immediate.  Mirrors
 * iemOp_imul_Gv_Ev_Iz except the immediate is one byte (hence the 1 passed
 * as the trailing-immediate size to IEM_MC_CALC_RM_EFF_ADDR for memory
 * operands) and is sign-extended to the operand size.
 */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Ib)
{
    IEMOP_MNEMONIC(imul_Gv_Ev_Ib, "imul Gv,Ev,Ib"); /* Gv = Ev * Ib; */
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand; the (int8_t) cast sign-extends the immediate */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                    0);
                IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);
                IEM_MC_LOCAL(uint16_t,      u16Tmp);

                IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,      pu16Dst,            0);
                IEM_MC_ARG(uint16_t,        u16Src,             1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_LOCAL(uint16_t,      u16Tmp);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = imm8 follows */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_S8_SX_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand; the (int8_t) cast sign-extends the immediate */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                    0);
                IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);
                IEM_MC_LOCAL(uint32_t,      u32Tmp);

                IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,      pu32Dst,            0);
                IEM_MC_ARG(uint32_t,        u32Src,             1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_LOCAL(uint32_t,      u32Tmp);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = imm8 follows */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_S8_SX_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand; the (int8_t) cast sign-extends the immediate */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                    0);
                IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);
                IEM_MC_LOCAL(uint64_t,      u64Tmp);

                IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,      pu64Dst,            0);
                IEM_MC_ARG(uint64_t,        u64Src,             1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_LOCAL(uint64_t,      u64Tmp);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = imm8 follows */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S8_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
    }
    AssertFailedReturn(VERR_IEM_IPE_8);
}
2326
2327
2328/**
2329 * @opcode 0x6c
2330 */
FNIEMOP_DEF(iemOp_insb_Yb_DX)
{
    /* INS byte: input from port DX into ES:[e/rDI]; 80186+ instruction. */
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        /* F2 (REPNZ) and F3 (REPZ) both select the REP variant here. */
        IEMOP_MNEMONIC(rep_insb_Yb_DX, "rep ins Yb,DX");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            /* The trailing 'false' flag is presumably "I/O permissions not
               pre-checked" -- TODO confirm against iemCImpl_rep_ins_op8_addrNN. */
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr16, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr32, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Single (non-repeated) iteration. */
        IEMOP_MNEMONIC(ins_Yb_DX, "ins Yb,DX");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr16, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr32, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
2358
2359
2360/**
2361 * @opcode 0x6d
2362 */
FNIEMOP_DEF(iemOp_inswd_Yv_DX)
{
    /* INS word/dword: input from port DX into ES:[e/rDI]; 80186+ instruction.
       Dispatches on both effective operand size and effective address size. */
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        /* F2 (REPNZ) and F3 (REPZ) both select the REP variant here. */
        IEMOP_MNEMONIC(rep_ins_Yv_DX, "rep ins Yv,DX");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reached: every case above returns */
            case IEMMODE_64BIT:
                /* 64-bit operand size uses the 32-bit handlers (op32). */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reached: every case above returns */
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Single (non-repeated) iteration. */
        IEMOP_MNEMONIC(ins_Yv_DX, "ins Yv,DX");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reached: every case above returns */
            case IEMMODE_64BIT:
                /* 64-bit operand size uses the 32-bit handlers (op32). */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reached: every case above returns */
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
2422
2423
2424/**
2425 * @opcode 0x6e
2426 */
FNIEMOP_DEF(iemOp_outsb_Yb_DX)
{
    /* OUTS byte: output from iEffSeg:[e/rSI] to port DX; 80186+ instruction.
       Unlike INS, the source segment honors segment-override prefixes, hence
       iEffSeg is passed through to the C implementation. */
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        /* F2 (REPNZ) and F3 (REPZ) both select the REP variant here. */
        IEMOP_MNEMONIC(rep_outsb_DX_Yb, "rep outs DX,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Single (non-repeated) iteration. */
        IEMOP_MNEMONIC(outs_DX_Yb, "outs DX,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
2454
2455
2456/**
2457 * @opcode 0x6f
2458 */
FNIEMOP_DEF(iemOp_outswd_Yv_DX)
{
    /* OUTS word/dword: output from iEffSeg:[e/rSI] to port DX; 80186+.
       Dispatches on both effective operand size and effective address size;
       the source segment honors segment-override prefixes (iEffSeg). */
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        /* F2 (REPNZ) and F3 (REPZ) both select the REP variant here. */
        IEMOP_MNEMONIC(rep_outs_DX_Yv, "rep outs DX,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reached: every case above returns */
            case IEMMODE_64BIT:
                /* 64-bit operand size uses the 32-bit handlers (op32). */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reached: every case above returns */
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Single (non-repeated) iteration. */
        IEMOP_MNEMONIC(outs_DX_Yv, "outs DX,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reached: every case above returns */
            case IEMMODE_64BIT:
                /* 64-bit operand size uses the 32-bit handlers (op32). */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reached: every case above returns */
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
2518
2519
2520/**
2521 * @opcode 0x70
2522 */
FNIEMOP_DEF(iemOp_jo_Jb)
{
    /* JO rel8: jump short if overflow (OF=1). */
    IEMOP_MNEMONIC(jo_Jb, "jo Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2539
2540
2541/**
2542 * @opcode 0x71
2543 */
FNIEMOP_DEF(iemOp_jno_Jb)
{
    /* JNO rel8: jump short if not overflow (OF=0).  The negated condition is
       implemented by testing OF and swapping the taken/not-taken branches. */
    IEMOP_MNEMONIC(jno_Jb, "jno Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2560
2561/**
2562 * @opcode 0x72
2563 */
FNIEMOP_DEF(iemOp_jc_Jb)
{
    /* JC/JB/JNAE rel8: jump short if carry (CF=1). */
    IEMOP_MNEMONIC(jc_Jb, "jc/jnae Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2580
2581
2582/**
2583 * @opcode 0x73
2584 */
FNIEMOP_DEF(iemOp_jnc_Jb)
{
    /* JNC/JNB/JAE rel8: jump short if not carry (CF=0); branches swapped. */
    IEMOP_MNEMONIC(jnc_Jb, "jnc/jnb Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2601
2602
2603/**
2604 * @opcode 0x74
2605 */
FNIEMOP_DEF(iemOp_je_Jb)
{
    /* JE/JZ rel8: jump short if equal/zero (ZF=1). */
    IEMOP_MNEMONIC(je_Jb, "je/jz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2622
2623
2624/**
2625 * @opcode 0x75
2626 */
FNIEMOP_DEF(iemOp_jne_Jb)
{
    /* JNE/JNZ rel8: jump short if not equal/zero (ZF=0); branches swapped. */
    IEMOP_MNEMONIC(jne_Jb, "jne/jnz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2643
2644
2645/**
2646 * @opcode 0x76
2647 */
FNIEMOP_DEF(iemOp_jbe_Jb)
{
    /* JBE/JNA rel8: jump short if below or equal (CF=1 or ZF=1). */
    IEMOP_MNEMONIC(jbe_Jb, "jbe/jna Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2664
2665
2666/**
2667 * @opcode 0x77
2668 */
FNIEMOP_DEF(iemOp_jnbe_Jb)
{
    /* JA/JNBE rel8: jump short if above (CF=0 and ZF=0); branches swapped. */
    IEMOP_MNEMONIC(ja_Jb, "ja/jnbe Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2685
2686
2687/**
2688 * @opcode 0x78
2689 */
FNIEMOP_DEF(iemOp_js_Jb)
{
    /* JS rel8: jump short if sign (SF=1). */
    IEMOP_MNEMONIC(js_Jb, "js Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2706
2707
2708/**
2709 * @opcode 0x79
2710 */
FNIEMOP_DEF(iemOp_jns_Jb)
{
    /* JNS rel8: jump short if not sign (SF=0); branches swapped. */
    IEMOP_MNEMONIC(jns_Jb, "jns Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2727
2728
2729/**
2730 * @opcode 0x7a
2731 */
FNIEMOP_DEF(iemOp_jp_Jb)
{
    /* JP/JPE rel8: jump short if parity (PF=1). */
    IEMOP_MNEMONIC(jp_Jb, "jp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2748
2749
2750/**
2751 * @opcode 0x7b
2752 */
FNIEMOP_DEF(iemOp_jnp_Jb)
{
    /* JNP/JPO rel8: jump short if not parity (PF=0); branches swapped. */
    IEMOP_MNEMONIC(jnp_Jb, "jnp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2769
2770
2771/**
2772 * @opcode 0x7c
2773 */
FNIEMOP_DEF(iemOp_jl_Jb)
{
    /* JL/JNGE rel8: jump short if less (signed: SF != OF). */
    IEMOP_MNEMONIC(jl_Jb, "jl/jnge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2790
2791
2792/**
2793 * @opcode 0x7d
2794 */
FNIEMOP_DEF(iemOp_jnl_Jb)
{
    /* JGE/JNL rel8: jump short if greater or equal (signed: SF == OF);
       implemented by testing SF != OF and swapping the branches. */
    IEMOP_MNEMONIC(jge_Jb, "jnl/jge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2811
2812
2813/**
2814 * @opcode 0x7e
2815 */
FNIEMOP_DEF(iemOp_jle_Jb)
{
    /* JLE/JNG rel8: jump short if less or equal (signed: ZF=1 or SF != OF). */
    IEMOP_MNEMONIC(jle_Jb, "jle/jng Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2832
2833
2834/**
2835 * @opcode 0x7f
2836 */
FNIEMOP_DEF(iemOp_jnle_Jb)
{
    /* JG/JNLE rel8: jump short if greater (signed: ZF=0 and SF == OF);
       implemented by testing the JLE condition and swapping the branches. */
    IEMOP_MNEMONIC(jg_Jb, "jnle/jg Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2853
2854
2855/**
2856 * @opcode 0x80
2857 */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_80)
{
    /*
     * Group 1 byte ops with imm8 (0x80): the ModR/M reg field selects the
     * operation; the actual worker is looked up in g_apIemImplGrp1.
     */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: IEMOP_MNEMONIC(add_Eb_Ib, "add Eb,Ib"); break;
        case 1: IEMOP_MNEMONIC(or_Eb_Ib, "or Eb,Ib"); break;
        case 2: IEMOP_MNEMONIC(adc_Eb_Ib, "adc Eb,Ib"); break;
        case 3: IEMOP_MNEMONIC(sbb_Eb_Ib, "sbb Eb,Ib"); break;
        case 4: IEMOP_MNEMONIC(and_Eb_Ib, "and Eb,Ib"); break;
        case 5: IEMOP_MNEMONIC(sub_Eb_Ib, "sub Eb,Ib"); break;
        case 6: IEMOP_MNEMONIC(xor_Eb_Ib, "xor Eb,Ib"); break;
        case 7: IEMOP_MNEMONIC(cmp_Eb_Ib, "cmp Eb,Ib"); break;
    }
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target - LOCK prefix is not accepted here */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        uint32_t fAccess;
        if (pImpl->pfnLockedU8)
            fAccess = IEM_ACCESS_DATA_RW;
        else /* CMP */
            fAccess = IEM_ACCESS_DATA_R;    /* CMP only reads the destination and has no locked worker. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);   /* 1 = size of the imm8 still to be fetched */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);
        /* CMP has no locked worker, so it rejects the LOCK prefix outright. */
        if (pImpl->pfnLockedU8)
            IEMOP_HLP_DONE_DECODING();
        else
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_MEM_MAP(pu8Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2926
2927
2928/**
2929 * @opcode 0x81
2930 */
2931FNIEMOP_DEF(iemOp_Grp1_Ev_Iz)
2932{
2933 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2934 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
2935 {
2936 case 0: IEMOP_MNEMONIC(add_Ev_Iz, "add Ev,Iz"); break;
2937 case 1: IEMOP_MNEMONIC(or_Ev_Iz, "or Ev,Iz"); break;
2938 case 2: IEMOP_MNEMONIC(adc_Ev_Iz, "adc Ev,Iz"); break;
2939 case 3: IEMOP_MNEMONIC(sbb_Ev_Iz, "sbb Ev,Iz"); break;
2940 case 4: IEMOP_MNEMONIC(and_Ev_Iz, "and Ev,Iz"); break;
2941 case 5: IEMOP_MNEMONIC(sub_Ev_Iz, "sub Ev,Iz"); break;
2942 case 6: IEMOP_MNEMONIC(xor_Ev_Iz, "xor Ev,Iz"); break;
2943 case 7: IEMOP_MNEMONIC(cmp_Ev_Iz, "cmp Ev,Iz"); break;
2944 }
2945 PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];
2946
2947 switch (pVCpu->iem.s.enmEffOpSize)
2948 {
2949 case IEMMODE_16BIT:
2950 {
2951 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2952 {
2953 /* register target */
2954 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
2955 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2956 IEM_MC_BEGIN(3, 0);
2957 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
2958 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u16Imm, 1);
2959 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2960
2961 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2962 IEM_MC_REF_EFLAGS(pEFlags);
2963 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
2964
2965 IEM_MC_ADVANCE_RIP();
2966 IEM_MC_END();
2967 }
2968 else
2969 {
2970 /* memory target */
2971 uint32_t fAccess;
2972 if (pImpl->pfnLockedU16)
2973 fAccess = IEM_ACCESS_DATA_RW;
2974 else /* CMP, TEST */
2975 fAccess = IEM_ACCESS_DATA_R;
2976 IEM_MC_BEGIN(3, 2);
2977 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
2978 IEM_MC_ARG(uint16_t, u16Src, 1);
2979 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
2980 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2981
2982 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
2983 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
2984 IEM_MC_ASSIGN(u16Src, u16Imm);
2985 if (pImpl->pfnLockedU16)
2986 IEMOP_HLP_DONE_DECODING();
2987 else
2988 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2989 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
2990 IEM_MC_FETCH_EFLAGS(EFlags);
2991 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
2992 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
2993 else
2994 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
2995
2996 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
2997 IEM_MC_COMMIT_EFLAGS(EFlags);
2998 IEM_MC_ADVANCE_RIP();
2999 IEM_MC_END();
3000 }
3001 break;
3002 }
3003
3004 case IEMMODE_32BIT:
3005 {
3006 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3007 {
3008 /* register target */
3009 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
3010 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3011 IEM_MC_BEGIN(3, 0);
3012 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
3013 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u32Imm, 1);
3014 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3015
3016 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3017 IEM_MC_REF_EFLAGS(pEFlags);
3018 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
3019 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
3020
3021 IEM_MC_ADVANCE_RIP();
3022 IEM_MC_END();
3023 }
3024 else
3025 {
3026 /* memory target */
3027 uint32_t fAccess;
3028 if (pImpl->pfnLockedU32)
3029 fAccess = IEM_ACCESS_DATA_RW;
3030 else /* CMP, TEST */
3031 fAccess = IEM_ACCESS_DATA_R;
3032 IEM_MC_BEGIN(3, 2);
3033 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
3034 IEM_MC_ARG(uint32_t, u32Src, 1);
3035 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
3036 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3037
3038 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
3039 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
3040 IEM_MC_ASSIGN(u32Src, u32Imm);
3041 if (pImpl->pfnLockedU32)
3042 IEMOP_HLP_DONE_DECODING();
3043 else
3044 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3045 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
3046 IEM_MC_FETCH_EFLAGS(EFlags);
3047 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
3048 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
3049 else
3050 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
3051
3052 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
3053 IEM_MC_COMMIT_EFLAGS(EFlags);
3054 IEM_MC_ADVANCE_RIP();
3055 IEM_MC_END();
3056 }
3057 break;
3058 }
3059
3060 case IEMMODE_64BIT:
3061 {
3062 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3063 {
3064 /* register target */
3065 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
3066 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3067 IEM_MC_BEGIN(3, 0);
3068 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
3069 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u64Imm, 1);
3070 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3071
3072 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3073 IEM_MC_REF_EFLAGS(pEFlags);
3074 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
3075
3076 IEM_MC_ADVANCE_RIP();
3077 IEM_MC_END();
3078 }
3079 else
3080 {
3081 /* memory target */
3082 uint32_t fAccess;
3083 if (pImpl->pfnLockedU64)
3084 fAccess = IEM_ACCESS_DATA_RW;
3085 else /* CMP */
3086 fAccess = IEM_ACCESS_DATA_R;
3087 IEM_MC_BEGIN(3, 2);
3088 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
3089 IEM_MC_ARG(uint64_t, u64Src, 1);
3090 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
3091 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3092
3093 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
3094 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
3095 if (pImpl->pfnLockedU64)
3096 IEMOP_HLP_DONE_DECODING();
3097 else
3098 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3099 IEM_MC_ASSIGN(u64Src, u64Imm);
3100 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
3101 IEM_MC_FETCH_EFLAGS(EFlags);
3102 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
3103 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
3104 else
3105 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
3106
3107 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
3108 IEM_MC_COMMIT_EFLAGS(EFlags);
3109 IEM_MC_ADVANCE_RIP();
3110 IEM_MC_END();
3111 }
3112 break;
3113 }
3114 }
3115 return VINF_SUCCESS;
3116}
3117
3118
3119/**
3120 * @opcode 0x82
3121 * @opmnemonic grp1_82
3122 * @opgroup op_groups
3123 */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_82)
{
    /* 0x82 is an alias of 0x80 (group 1 Eb,Ib) that is invalid in 64-bit mode. */
    IEMOP_HLP_NO_64BIT(); /** @todo do we need to decode the whole instruction or is this ok? */
    return FNIEMOP_CALL(iemOp_Grp1_Eb_Ib_80);
}
3129
3130
3131/**
3132 * @opcode 0x83
3133 */
FNIEMOP_DEF(iemOp_Grp1_Ev_Ib)
{
    /*
     * Group 1 word/dword/qword ops with a sign-extended imm8 (0x83).
     * The ModR/M reg field selects the operation; the imm8 is sign-extended
     * ((int8_t) casts below) to the effective operand size.
     */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: IEMOP_MNEMONIC(add_Ev_Ib, "add Ev,Ib"); break;
        case 1: IEMOP_MNEMONIC(or_Ev_Ib, "or Ev,Ib"); break;
        case 2: IEMOP_MNEMONIC(adc_Ev_Ib, "adc Ev,Ib"); break;
        case 3: IEMOP_MNEMONIC(sbb_Ev_Ib, "sbb Ev,Ib"); break;
        case 4: IEMOP_MNEMONIC(and_Ev_Ib, "and Ev,Ib"); break;
        case 5: IEMOP_MNEMONIC(sub_Ev_Ib, "sub Ev,Ib"); break;
        case 6: IEMOP_MNEMONIC(xor_Ev_Ib, "xor Ev,Ib"); break;
        case 7: IEMOP_MNEMONIC(cmp_Ev_Ib, "cmp Ev,Ib"); break;
    }
    /* Note! Seems the OR, AND, and XOR instructions are present on CPUs prior
       to the 386 even if absent in the intel reference manuals and some
       3rd party opcode listings. */
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register target
         */
        /* NOTE(review): the done-decoding helper is invoked *before* the imm8
           byte is fetched here, unlike the 0x80/0x81 paths which fetch the
           immediate first -- verify this ordering is intentional. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (int8_t)u8Imm,1);  /* imm8 sign-extended to 16 bits */
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (int8_t)u8Imm,1);  /* imm8 sign-extended to 32 bits */
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /* 32-bit register writes zero bits 63:32 */

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int8_t)u8Imm,1);  /* imm8 sign-extended to 64 bits */
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }
        }
    }
    else
    {
        /*
         * Memory target.
         */
        /* pfnLockedU16 stands in for "has locked workers at all sizes": only
           CMP lacks them, so it gets a read-only mapping. */
        uint32_t fAccess;
        if (pImpl->pfnLockedU16)
            fAccess = IEM_ACCESS_DATA_RW;
        else /* CMP */
            fAccess = IEM_ACCESS_DATA_R;

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);   /* 1 = size of the imm8 still to be fetched */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm);           /* sign-extend imm8 */
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);   /* 1 = size of the imm8 still to be fetched */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm);           /* sign-extend imm8 */
                if (pImpl->pfnLockedU32)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);   /* 1 = size of the imm8 still to be fetched */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm);           /* sign-extend imm8 */
                if (pImpl->pfnLockedU64)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }
        }
    }
    return VINF_SUCCESS;
}
3315
3316
3317/**
3318 * @opcode 0x84
3319 */
FNIEMOP_DEF(iemOp_test_Eb_Gb)
{
    /* TEST Eb,Gb: AND without storing the result - only EFLAGS are updated;
       AF is declared undefined for the verifier. */
    IEMOP_MNEMONIC(test_Eb_Gb, "test Eb,Gb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_test);
}
3326
3327
3328/**
3329 * @opcode 0x85
3330 */
FNIEMOP_DEF(iemOp_test_Ev_Gv)
{
    /* TEST Ev,Gv: AND without storing the result - only EFLAGS are updated;
       AF is declared undefined for the verifier. */
    IEMOP_MNEMONIC(test_Ev_Gv, "test Ev,Gv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_test);
}
3337
3338
/**
 * @opcode 0x86
 *
 * Exchanges a byte general register with a byte register or memory operand.
 */
FNIEMOP_DEF(iemOp_xchg_Eb_Gb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC(xchg_Eb_Gb, "xchg Eb,Gb");

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        /* Register-register form: fetch both, store both swapped. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, uTmp1);
        IEM_MC_LOCAL(uint8_t, uTmp2);

        IEM_MC_FETCH_GREG_U8(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_FETCH_GREG_U8(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.  The memory operand is mapped read-write
         * and the swap is done by the iemAImpl_xchg_u8 assembly worker,
         * which writes through both pointers.
         */
/** @todo the register must be committed separately! */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint8_t *, pu8Mem, 0);
        IEM_MC_ARG(uint8_t *, pu8Reg, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8, pu8Mem, pu8Reg);
        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Mem, IEM_ACCESS_DATA_RW);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3388
3389
/**
 * @opcode 0x87
 *
 * Exchanges a word/dword/qword general register with a register or memory
 * operand, dispatching on the effective operand size.
 */
FNIEMOP_DEF(iemOp_xchg_Ev_Gv)
{
    IEMOP_MNEMONIC(xchg_Ev_Gv, "xchg Ev,Gv");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, uTmp1);
                IEM_MC_LOCAL(uint16_t, uTmp2);

                IEM_MC_FETCH_GREG_U16(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, uTmp1);
                IEM_MC_LOCAL(uint32_t, uTmp2);

                IEM_MC_FETCH_GREG_U32(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U32(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, uTmp1);
                IEM_MC_LOCAL(uint64_t, uTmp2);

                IEM_MC_FETCH_GREG_U64(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U64(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory.  The memory operand is mapped read-write
         * and swapped against the register via the iemAImpl_xchg_uNN worker.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
/** @todo the register must be committed separately! */
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint16_t *, pu16Mem, 0);
                IEM_MC_ARG(uint16_t *, pu16Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu16Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16, pu16Mem, pu16Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Mem, IEM_ACCESS_DATA_RW);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint32_t *, pu32Mem, 0);
                IEM_MC_ARG(uint32_t *, pu32Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu32Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32, pu32Mem, pu32Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Mem, IEM_ACCESS_DATA_RW);

                /* The worker stored through the register reference, which
                   bypasses the normal 32-bit store path, so the implicit
                   zeroing of the high qword half must be done explicitly. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint64_t *, pu64Mem, 0);
                IEM_MC_ARG(uint64_t *, pu64Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu64Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64, pu64Mem, pu64Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Mem, IEM_ACCESS_DATA_RW);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
3513
3514
/**
 * @opcode 0x88
 *
 * Moves a byte general register into a byte register or memory operand.
 */
FNIEMOP_DEF(iemOp_mov_Eb_Gb)
{
    IEMOP_MNEMONIC(mov_Eb_Gb, "mov Eb,Gb");

    uint8_t bRm;
    IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;

}
3556
3557
/**
 * @opcode 0x89
 *
 * Moves a word/dword/qword general register into a register or memory
 * operand, dispatching on the effective operand size.
 */
FNIEMOP_DEF(iemOp_mov_Ev_Gv)
{
    IEMOP_MNEMONIC(mov_Ev_Gv, "mov Ev,Gv");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
3649
3650
/**
 * @opcode 0x8a
 *
 * Moves a byte register or memory operand into a byte general register.
 */
FNIEMOP_DEF(iemOp_mov_Gb_Eb)
{
    IEMOP_MNEMONIC(mov_Gb_Eb, "mov Gb,Eb");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3690
3691
/**
 * @opcode 0x8b
 *
 * Moves a word/dword/qword register or memory operand into a general
 * register, dispatching on the effective operand size.
 */
FNIEMOP_DEF(iemOp_mov_Gv_Ev)
{
    IEMOP_MNEMONIC(mov_Gv_Ev, "mov Gv,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
3783
3784
3785/**
3786 * opcode 0x63
3787 * @todo Table fixme
3788 */
3789FNIEMOP_DEF(iemOp_arpl_Ew_Gw_movsx_Gv_Ev)
3790{
3791 if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
3792 return FNIEMOP_CALL(iemOp_arpl_Ew_Gw);
3793 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
3794 return FNIEMOP_CALL(iemOp_mov_Gv_Ev);
3795 return FNIEMOP_CALL(iemOp_movsxd_Gv_Ev);
3796}
3797
3798
/**
 * @opcode 0x8c
 *
 * Moves a segment register into a general register or memory operand.
 */
FNIEMOP_DEF(iemOp_mov_Ev_Sw)
{
    IEMOP_MNEMONIC(mov_Ev_Sw, "mov Ev,Sw");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * Check that the destination register exists. The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
    if (   iSegReg > X86_SREG_GS)
        return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     * In that case, the operand size is respected and the upper bits are
     * cleared (starting with some pentium).
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                /* Zero-extend the 16-bit selector into the 32-bit register. */
                IEM_MC_FETCH_SREG_ZX_U32(u32Value, iSegReg);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                /* Zero-extend the 16-bit selector into the 64-bit register. */
                IEM_MC_FETCH_SREG_ZX_U64(u64Value, iSegReg);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're saving the register to memory.  The access is word sized
         * regardless of operand size prefixes.
         */
#if 0 /* not necessary */
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
#endif
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t,  u16Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3874
3875
3876
3877
/**
 * @opcode 0x8d
 *
 * Loads the effective address of the memory operand into a general
 * register; the register form is invalid.
 */
FNIEMOP_DEF(iemOp_lea_Gv_M)
{
    IEMOP_MNEMONIC(lea_Gv_M, "lea Gv,M");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE(); /* no register form */

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR,  GCPtrEffSrc);
            IEM_MC_LOCAL(uint16_t, u16Cast);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            /* A 16-bit operand size truncates the calculated address. */
            IEM_MC_ASSIGN_TO_SMALLER(u16Cast, GCPtrEffSrc);
            IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Cast);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR,  GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Cast);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            /* A 32-bit operand size truncates the calculated address. */
            IEM_MC_ASSIGN_TO_SMALLER(u32Cast, GCPtrEffSrc);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Cast);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, GCPtrEffSrc);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
    }
    AssertFailedReturn(VERR_IEM_IPE_7);
}
3926
3927
/**
 * @opcode 0x8e
 *
 * Loads a segment register from a 16-bit register or memory operand via
 * the iemCImpl_load_SReg worker (which performs the selector checks).
 */
FNIEMOP_DEF(iemOp_mov_Sw_Ev)
{
    IEMOP_MNEMONIC(mov_Sw_Ev, "mov Sw,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * The practical operand size is 16-bit.
     */
#if 0 /* not necessary */
    pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
#endif

    /*
     * Check that the destination register exists and can be used with this
     * instruction.  The REX.R prefix is ignored.  Loading CS this way is
     * invalid.
     */
    uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
    if (   iSegReg == X86_SREG_CS
        || iSegReg > X86_SREG_GS)
        return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
        IEM_MC_ARG(uint16_t,      u16Value,          1);
        IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading the register from memory.  The access is word sized
         * regardless of operand size prefixes.
         */
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
        IEM_MC_ARG(uint16_t,      u16Value,          1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3984
3985
/** Opcode 0x8f /0. */
FNIEMOP_DEF_1(iemOp_pop_Ev, uint8_t, bRm)
{
    /* This bugger is rather annoying as it requires rSP to be updated before
       doing the effective address calculations.  Will eventually require a
       split between the R/M+SIB decoding and the effective address
       calculation - which is something that is required for any attempt at
       reusing this code for a recompiler.  It may also be good to have if we
       need to delay #UD exception caused by invalid lock prefixes.

       For now, we'll do a mostly safe interpreter-only implementation here. */
    /** @todo What's the deal with the 'reg' field and pop Ev?  Ignoring it for
     *        now until tests show it's checked.. */
    IEMOP_MNEMONIC(pop_Ev, "pop Ev");

    /* Register access is relatively easy and can share code. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(iemOpCommonPopGReg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);

    /*
     * Memory target.
     *
     * Intel says that RSP is incremented before it's used in any effective
     * address calculations.  This means some serious extra annoyance here since
     * we decode and calculate the effective address in one step and like to
     * delay committing registers till everything is done.
     *
     * So, we'll decode and calculate the effective address twice.  This will
     * require some recoding if turned into a recompiler.
     */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* The common code does this differently. */

#ifndef TST_IEM_CHECK_MC
    /* Calc effective address with modified ESP.  The last argument to the
       iemOpHlpCalcRmEffAddrEx helper is the rSP displacement (operand size)
       applied before the address calculation. */
/** @todo testcase */
    PCPUMCTX        pCtx = IEM_GET_CTX(pVCpu);
    RTGCPTR         GCPtrEff;
    VBOXSTRICTRC    rcStrict;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 2); break;
        case IEMMODE_32BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 4); break;
        case IEMMODE_64BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 8); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict != VINF_SUCCESS)
        return rcStrict;
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* Perform the operation - this should be CImpl.  Pop into a temporary
       rSP copy first so nothing is committed if the store faults. */
    RTUINT64U TmpRsp;
    TmpRsp.u = pCtx->rsp;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Value;
            rcStrict = iemMemStackPopU16Ex(pVCpu, &u16Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU16(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u16Value);
            break;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Value;
            rcStrict = iemMemStackPopU32Ex(pVCpu, &u32Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU32(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u32Value);
            break;
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Value;
            rcStrict = iemMemStackPopU64Ex(pVCpu, &u64Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU64(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u64Value);
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict == VINF_SUCCESS)
    {
        /* Everything worked: commit the new rSP and advance rIP. */
        pCtx->rsp = TmpRsp.u;
        iemRegUpdateRipAndClearRF(pVCpu);
    }
    return rcStrict;

#else
    return VERR_IEM_IPE_2;
#endif
}
4080
4081
/**
 * @opcode 0x8f
 *
 * Dispatches between pop Ev (modrm.reg == 0) and the AMD XOP prefix
 * (modrm.reg != 0 on CPUs with XOP support).
 */
FNIEMOP_DEF(iemOp_Grp1A__xop)
{
    /*
     * AMD has defined /1 thru /7 as XOP prefix.  The prefix is similar to the
     * three byte VEX prefix, except that the mmmmm field cannot have the values
     * 0 thru 7, because it would then be confused with pop Ev (modrm.reg == 0).
     */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) == (0 << X86_MODRM_REG_SHIFT)) /* /0 */
        return FNIEMOP_CALL_1(iemOp_pop_Ev, bRm);

    IEMOP_MNEMONIC(xop, "xop");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXop)
    {
        /** @todo Test when exactly the XOP conformance checks kick in during
         * instruction decoding and fetching (using \#PF). */
        uint8_t bXop2;   IEM_OPCODE_GET_NEXT_U8(&bXop2);
        uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
        /* Like VEX, XOP may not be combined with legacy size/rep/lock/REX
           prefixes; reject the mix as an invalid opcode below. */
        if (   (  pVCpu->iem.s.fPrefixes
                & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_LOCK | IEM_OP_PRF_REX))
            == 0)
        {
            pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_XOP;
            if (bXop2 & 0x80 /* XOP.W */)
                pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
            /* The R/X/B and vvvv fields are stored inverted in the prefix
               bytes, hence all the ~ operators below. */
            pVCpu->iem.s.uRexReg    = ~bRm >> (7 - 3);
            pVCpu->iem.s.uRexIndex  = ~bRm >> (6 - 3);
            pVCpu->iem.s.uRexB      = ~bRm >> (5 - 3);
            pVCpu->iem.s.uVex3rdReg = (~bXop2 >> 3) & 0xf;
            pVCpu->iem.s.uVexLength = (bXop2 >> 2) & 1;
            pVCpu->iem.s.idxPrefix  = bXop2 & 0x3;

            /** @todo XOP: Just use new tables and decoders. */
            switch (bRm & 0x1f)
            {
                case 8: /* xop opcode map 8. */
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;

                case 9: /* xop opcode map 9. */
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;

                case 10: /* xop opcode map 10. */
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;

                default:
                    Log(("XOP: Invalid vvvv value: %#x!\n", bRm & 0x1f));
                    return IEMOP_RAISE_INVALID_OPCODE();
            }
        }
        else
            Log(("XOP: Invalid prefix mix!\n"));
    }
    else
        Log(("XOP: XOP support disabled!\n"));
    return IEMOP_RAISE_INVALID_OPCODE();
}
4144
4145
/**
 * Common 'xchg reg,rAX' helper.
 *
 * Used by opcodes 0x90 (with REX.B) thru 0x97.  Exchanges the given
 * general register (after REX.B extension) with rAX using the effective
 * operand size.  No flags are modified.
 */
FNIEMOP_DEF_1(iemOpCommonXchgGRegRax, uint8_t, iReg)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* Apply REX.B so r8-r15 can be reached. */
    iReg |= pVCpu->iem.s.uRexB;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint16_t, u16Tmp1);
            IEM_MC_LOCAL(uint16_t, u16Tmp2);
            IEM_MC_FETCH_GREG_U16(u16Tmp1, iReg);
            IEM_MC_FETCH_GREG_U16(u16Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp1);
            IEM_MC_STORE_GREG_U16(iReg, u16Tmp2);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Tmp1);
            IEM_MC_LOCAL(uint32_t, u32Tmp2);
            IEM_MC_FETCH_GREG_U32(u32Tmp1, iReg);
            IEM_MC_FETCH_GREG_U32(u32Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp1);
            IEM_MC_STORE_GREG_U32(iReg, u32Tmp2);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Tmp1);
            IEM_MC_LOCAL(uint64_t, u64Tmp2);
            IEM_MC_FETCH_GREG_U64(u64Tmp1, iReg);
            IEM_MC_FETCH_GREG_U64(u64Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp1);
            IEM_MC_STORE_GREG_U64(iReg, u64Tmp2);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
4195
4196
4197/**
4198 * @opcode 0x90
4199 */
4200FNIEMOP_DEF(iemOp_nop)
4201{
4202 /* R8/R8D and RAX/EAX can be exchanged. */
4203 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_B)
4204 {
4205 IEMOP_MNEMONIC(xchg_r8_rAX, "xchg r8,rAX");
4206 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xAX);
4207 }
4208
4209 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
4210 IEMOP_MNEMONIC(pause, "pause");
4211 else
4212 IEMOP_MNEMONIC(nop, "nop");
4213 IEM_MC_BEGIN(0, 0);
4214 IEM_MC_ADVANCE_RIP();
4215 IEM_MC_END();
4216 return VINF_SUCCESS;
4217}
4218
4219
/**
 * @opcode 0x91
 */
FNIEMOP_DEF(iemOp_xchg_eCX_eAX)
{
    /* xchg rCX,rAX - delegates to the common xchg-with-rAX worker. */
    IEMOP_MNEMONIC(xchg_rCX_rAX, "xchg rCX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xCX);
}
4228
4229
/**
 * @opcode 0x92
 */
FNIEMOP_DEF(iemOp_xchg_eDX_eAX)
{
    /* xchg rDX,rAX - delegates to the common xchg-with-rAX worker. */
    IEMOP_MNEMONIC(xchg_rDX_rAX, "xchg rDX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDX);
}
4238
4239
/**
 * @opcode 0x93
 */
FNIEMOP_DEF(iemOp_xchg_eBX_eAX)
{
    /* xchg rBX,rAX - delegates to the common xchg-with-rAX worker. */
    IEMOP_MNEMONIC(xchg_rBX_rAX, "xchg rBX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBX);
}
4248
4249
4250/**
4251 * @opcode 0x94
4252 */
4253FNIEMOP_DEF(iemOp_xchg_eSP_eAX)
4254{
4255 IEMOP_MNEMONIC(xchg_rSX_rAX, "xchg rSX,rAX");
4256 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSP);
4257}
4258
4259
/**
 * @opcode 0x95
 */
FNIEMOP_DEF(iemOp_xchg_eBP_eAX)
{
    /* xchg rBP,rAX - delegates to the common xchg-with-rAX worker. */
    IEMOP_MNEMONIC(xchg_rBP_rAX, "xchg rBP,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBP);
}
4268
4269
/**
 * @opcode 0x96
 */
FNIEMOP_DEF(iemOp_xchg_eSI_eAX)
{
    /* xchg rSI,rAX - delegates to the common xchg-with-rAX worker. */
    IEMOP_MNEMONIC(xchg_rSI_rAX, "xchg rSI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSI);
}
4278
4279
/**
 * @opcode 0x97
 */
FNIEMOP_DEF(iemOp_xchg_eDI_eAX)
{
    /* xchg rDI,rAX - delegates to the common xchg-with-rAX worker. */
    IEMOP_MNEMONIC(xchg_rDI_rAX, "xchg rDI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDI);
}
4288
4289
/**
 * @opcode 0x98
 *
 * CBW/CWDE/CDQE - sign extends AL/AX/EAX into AX/EAX/RAX depending on the
 * effective operand size.  Implemented by testing the source's sign bit
 * and either OR-ing in or AND-ing out the upper half.
 */
FNIEMOP_DEF(iemOp_cbw)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEMOP_MNEMONIC(cbw, "cbw");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 7) {
                IEM_MC_OR_GREG_U16(X86_GREG_xAX, UINT16_C(0xff00));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U16(X86_GREG_xAX, UINT16_C(0x00ff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEMOP_MNEMONIC(cwde, "cwde");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
                IEM_MC_OR_GREG_U32(X86_GREG_xAX, UINT32_C(0xffff0000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U32(X86_GREG_xAX, UINT32_C(0x0000ffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEMOP_MNEMONIC(cdqe, "cdqe");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
                IEM_MC_OR_GREG_U64(X86_GREG_xAX, UINT64_C(0xffffffff00000000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U64(X86_GREG_xAX, UINT64_C(0x00000000ffffffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
4337
4338
/**
 * @opcode 0x99
 *
 * CWD/CDQ/CQO - sign extends AX/EAX/RAX into DX/EDX/RDX depending on the
 * effective operand size: rDX is set to all-ones when the source's sign
 * bit is set, and to zero otherwise.
 */
FNIEMOP_DEF(iemOp_cwd)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEMOP_MNEMONIC(cwd, "cwd");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, UINT16_C(0xffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEMOP_MNEMONIC(cdq, "cdq");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, UINT32_C(0xffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEMOP_MNEMONIC(cqo, "cqo");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 63) {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, UINT64_C(0xffffffffffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
4386
4387
4388/**
4389 * @opcode 0x9a
4390 */
4391FNIEMOP_DEF(iemOp_call_Ap)
4392{
4393 IEMOP_MNEMONIC(call_Ap, "call Ap");
4394 IEMOP_HLP_NO_64BIT();
4395
4396 /* Decode the far pointer address and pass it on to the far call C implementation. */
4397 uint32_t offSeg;
4398 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
4399 IEM_OPCODE_GET_NEXT_U32(&offSeg);
4400 else
4401 IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
4402 uint16_t uSel; IEM_OPCODE_GET_NEXT_U16(&uSel);
4403 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4404 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_callf, uSel, offSeg, pVCpu->iem.s.enmEffOpSize);
4405}
4406
4407
/** Opcode 0x9b. (aka fwait)
 * Raises the pending device-not-available / FPU exception checks, otherwise
 * does nothing but advance RIP. */
FNIEMOP_DEF(iemOp_wait)
{
    IEMOP_MNEMONIC(wait, "wait");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_WAIT_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
4421
4422
/**
 * @opcode 0x9c
 * PUSHF - deferred to the C implementation; operand size defaults to 64-bit
 * in long mode.
 */
FNIEMOP_DEF(iemOp_pushf_Fv)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_pushf, pVCpu->iem.s.enmEffOpSize);
}
4432
4433
/**
 * @opcode 0x9d
 * POPF - deferred to the C implementation; operand size defaults to 64-bit
 * in long mode.
 */
FNIEMOP_DEF(iemOp_popf_Fv)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_popf, pVCpu->iem.s.enmEffOpSize);
}
4443
4444
/**
 * @opcode 0x9e
 * SAHF - store AH into the low EFLAGS byte (SF, ZF, AF, PF, CF).  In 64-bit
 * mode this is only valid when the guest CPU reports the LAHF/SAHF feature.
 */
FNIEMOP_DEF(iemOp_sahf)
{
    IEMOP_MNEMONIC(sahf, "sahf");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (   pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
        && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
        return IEMOP_RAISE_INVALID_OPCODE();
    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint32_t, u32Flags);
    IEM_MC_LOCAL(uint32_t, EFlags);
    IEM_MC_FETCH_EFLAGS(EFlags);
    /* Without REX, 8-bit register index 4 (xSP) encodes AH. */
    IEM_MC_FETCH_GREG_U8_ZX_U32(u32Flags, X86_GREG_xSP/*=AH*/);
    /* Keep only the flags SAHF may modify, clear the old low byte, and
       force the reserved always-one bit (bit 1) before merging. */
    IEM_MC_AND_LOCAL_U32(u32Flags, X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    IEM_MC_AND_LOCAL_U32(EFlags, UINT32_C(0xffffff00));
    IEM_MC_OR_LOCAL_U32(u32Flags, X86_EFL_1);
    IEM_MC_OR_2LOCS_U32(EFlags, u32Flags);
    IEM_MC_COMMIT_EFLAGS(EFlags);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
4469
4470
/**
 * @opcode 0x9f
 * LAHF - load the low EFLAGS byte into AH.  In 64-bit mode this is only
 * valid when the guest CPU reports the LAHF/SAHF feature.
 */
FNIEMOP_DEF(iemOp_lahf)
{
    IEMOP_MNEMONIC(lahf, "lahf");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (   pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
        && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
        return IEMOP_RAISE_INVALID_OPCODE();
    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(uint8_t, u8Flags);
    IEM_MC_FETCH_EFLAGS_U8(u8Flags);
    /* Without REX, 8-bit register index 4 (xSP) encodes AH. */
    IEM_MC_STORE_GREG_U8(X86_GREG_xSP/*=AH*/, u8Flags);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
4489
4490
/**
 * Macro used by iemOp_mov_AL_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL and
 * iemOp_mov_Ov_rAX to fetch the moffsXX bit of the opcode and fend of lock
 * prefixes. Will return on failures.
 *
 * The width of the moffs operand follows the effective address size (16, 32
 * or 64 bits) and the result is zero extended to 64 bits.
 *
 * @param a_GCPtrMemOff The variable to store the offset in.
 */
#define IEMOP_FETCH_MOFFS_XX(a_GCPtrMemOff) \
    do \
    { \
        switch (pVCpu->iem.s.enmEffAddrMode) \
        { \
            case IEMMODE_16BIT: \
                IEM_OPCODE_GET_NEXT_U16_ZX_U64(&(a_GCPtrMemOff)); \
                break; \
            case IEMMODE_32BIT: \
                IEM_OPCODE_GET_NEXT_U32_ZX_U64(&(a_GCPtrMemOff)); \
                break; \
            case IEMMODE_64BIT: \
                IEM_OPCODE_GET_NEXT_U64(&(a_GCPtrMemOff)); \
                break; \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    } while (0)
4515
4516/**
4517 * @opcode 0xa0
4518 */
4519FNIEMOP_DEF(iemOp_mov_AL_Ob)
4520{
4521 /*
4522 * Get the offset and fend of lock prefixes.
4523 */
4524 RTGCPTR GCPtrMemOff;
4525 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
4526
4527 /*
4528 * Fetch AL.
4529 */
4530 IEM_MC_BEGIN(0,1);
4531 IEM_MC_LOCAL(uint8_t, u8Tmp);
4532 IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
4533 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
4534 IEM_MC_ADVANCE_RIP();
4535 IEM_MC_END();
4536 return VINF_SUCCESS;
4537}
4538
4539
/**
 * @opcode 0xa1
 * MOV rAX, moffs16/32/64 - load a word/dword/qword at seg:moffs into rAX;
 * the width follows the effective operand size.
 */
FNIEMOP_DEF(iemOp_mov_rAX_Ov)
{
    /*
     * Get the offset and fend of lock prefixes.
     */
    IEMOP_MNEMONIC(mov_rAX_Ov, "mov rAX,Ov");
    RTGCPTR GCPtrMemOff;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);

    /*
     * Fetch rAX.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint16_t, u16Tmp);
            IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
4587
4588
4589/**
4590 * @opcode 0xa2
4591 */
4592FNIEMOP_DEF(iemOp_mov_Ob_AL)
4593{
4594 /*
4595 * Get the offset and fend of lock prefixes.
4596 */
4597 RTGCPTR GCPtrMemOff;
4598 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
4599
4600 /*
4601 * Store AL.
4602 */
4603 IEM_MC_BEGIN(0,1);
4604 IEM_MC_LOCAL(uint8_t, u8Tmp);
4605 IEM_MC_FETCH_GREG_U8(u8Tmp, X86_GREG_xAX);
4606 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u8Tmp);
4607 IEM_MC_ADVANCE_RIP();
4608 IEM_MC_END();
4609 return VINF_SUCCESS;
4610}
4611
4612
/**
 * @opcode 0xa3
 * MOV moffs16/32/64, rAX - store rAX to seg:moffs; the width follows the
 * effective operand size.
 */
FNIEMOP_DEF(iemOp_mov_Ov_rAX)
{
    /*
     * Get the offset and fend of lock prefixes.
     */
    RTGCPTR GCPtrMemOff;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);

    /*
     * Store rAX.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint16_t, u16Tmp);
            IEM_MC_FETCH_GREG_U16(u16Tmp, X86_GREG_xAX);
            IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u16Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_FETCH_GREG_U32(u32Tmp, X86_GREG_xAX);
            IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u32Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_FETCH_GREG_U64(u64Tmp, X86_GREG_xAX);
            IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u64Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
4659
/** Macro used by iemOp_movsb_Xb_Yb and iemOp_movswd_Xv_Yv.
 * Copies ValBits/8 bytes from iEffSeg:rSI to ES:rDI, then advances (or, with
 * EFLAGS.DF set, retreats) both rSI and rDI by the element size.  AddrBits
 * selects the 16/32/64-bit views of rSI/rDI. */
#define IEM_MOVS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(0, 2); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP(); \
    IEM_MC_END();
4678
/**
 * @opcode 0xa4
 * MOVSB - copy a byte from iEffSeg:rSI to ES:rDI.  REP/REPNE forms are
 * handed off to the C implementation, which loops over rCX.
 */
FNIEMOP_DEF(iemOp_movsb_Xb_Yb)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_movsb_Xb_Yb, "rep movsb Xb,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(movsb_Xb_Yb, "movsb Xb,Yb");

    /*
     * Sharing case implementation with movs[wdq] below.
     */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_MOVS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_MOVS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_MOVS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
4714
4715
/**
 * @opcode 0xa5
 * MOVSW/MOVSD/MOVSQ - copy a word/dword/qword from iEffSeg:rSI to ES:rDI.
 * REP/REPNE forms are handed off to the C implementation.
 */
FNIEMOP_DEF(iemOp_movswd_Xv_Yv)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_movs_Xv_Yv, "rep movs Xv,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no break - unreachable, every inner case above returns */
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /* 64-bit op with 16-bit addressing cannot be encoded */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(movs_Xv_Yv, "movs Xv,Yv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with movsb.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_MOVS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_MOVS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}

#undef IEM_MOVS_CASE
4802
/** Macro used by iemOp_cmpsb_Xb_Yb and iemOp_cmpswd_Xv_Yv.
 * Reads an element each from iEffSeg:rSI and ES:rDI, runs the cmp worker on
 * them (updating EFLAGS only), then advances/retreats rSI and rDI by the
 * element size per EFLAGS.DF. */
#define IEM_CMPS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(3, 3); \
    IEM_MC_ARG(uint##ValBits##_t *, puValue1, 0); \
    IEM_MC_ARG(uint##ValBits##_t, uValue2, 1); \
    IEM_MC_ARG(uint32_t *, pEFlags, 2); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue1); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue1, pVCpu->iem.s.iEffSeg, uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue2, X86_SREG_ES, uAddr); \
    IEM_MC_REF_LOCAL(puValue1, uValue1); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puValue1, uValue2, pEFlags); \
    \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP(); \
    IEM_MC_END(); \
4829
/**
 * @opcode 0xa6
 * CMPSB - compare the bytes at iEffSeg:rSI and ES:rDI, setting EFLAGS.
 * REPE and REPNE forms are deferred to the C implementation.
 */
FNIEMOP_DEF(iemOp_cmpsb_Xb_Yb)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC(repz_cmps_Xb_Yb, "repz cmps Xb,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC(repnz_cmps_Xb_Yb, "repnz cmps Xb,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(cmps_Xb_Yb, "cmps Xb,Yb");

    /*
     * Sharing case implementation with cmps[wdq] below.
     */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_CMPS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_CMPS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_CMPS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;

}
4877
4878
/**
 * @opcode 0xa7
 * CMPSW/CMPSD/CMPSQ - compare word/dword/qword elements at iEffSeg:rSI and
 * ES:rDI, setting EFLAGS.  REPE and REPNE forms are deferred to the C
 * implementation.
 */
FNIEMOP_DEF(iemOp_cmpswd_Xv_Yv)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC(repe_cmps_Xv_Yv, "repe cmps Xv,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no break - unreachable, every inner case above returns */
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_4); /* 64-bit op with 16-bit addressing cannot be encoded */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC(repne_cmps_Xv_Yv, "repne cmps Xv,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no break - unreachable, every inner case above returns */
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_2); /* 64-bit op with 16-bit addressing cannot be encoded */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    IEMOP_MNEMONIC(cmps_Xv_Yv, "cmps Xv,Yv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with cmpsb.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_CMPS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_CMPS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;

}

#undef IEM_CMPS_CASE
5001
/**
 * @opcode 0xa8
 * TEST AL, imm8 - AND without storing the result; EFLAGS only (AF is
 * undefined per the verification hint below).
 */
FNIEMOP_DEF(iemOp_test_AL_Ib)
{
    IEMOP_MNEMONIC(test_al_Ib, "test al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_test);
}
5011
5012
/**
 * @opcode 0xa9
 * TEST rAX, imm16/32 - AND without storing the result; EFLAGS only (AF is
 * undefined per the verification hint below).
 */
FNIEMOP_DEF(iemOp_test_eAX_Iz)
{
    IEMOP_MNEMONIC(test_rAX_Iz, "test rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_test);
}
5022
5023
/** Macro used by iemOp_stosb_Yb_AL and iemOp_stoswd_Yv_eAX.
 * Stores the low ValBits of rAX to ES:rDI and advances (or, with EFLAGS.DF
 * set, retreats) rDI by the element size. */
#define IEM_STOS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(0, 2); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    IEM_MC_FETCH_GREG_U##ValBits(uValue, X86_GREG_xAX); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP(); \
    IEM_MC_END(); \
5039
/**
 * @opcode 0xaa
 * STOSB - store AL to ES:rDI.  REP/REPNE forms are deferred to the C
 * implementation (rDI segment is fixed to ES, so no iEffSeg argument).
 */
FNIEMOP_DEF(iemOp_stosb_Yb_AL)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_stos_Yb_al, "rep stos Yb,al");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m16);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m32);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(stos_Yb_al, "stos Yb,al");

    /*
     * Sharing case implementation with stos[wdq] below.
     */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_STOS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_STOS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_STOS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
5075
5076
/**
 * @opcode 0xab
 * STOSW/STOSD/STOSQ - store ax/eax/rax to ES:rDI.  REP/REPNE forms are
 * deferred to the C implementation.
 */
FNIEMOP_DEF(iemOp_stoswd_Yv_eAX)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_stos_Yv_rAX, "rep stos Yv,rAX");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no break - unreachable, every inner case above returns */
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_9); /* 64-bit op with 16-bit addressing cannot be encoded */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(stos_Yv_rAX, "stos Yv,rAX");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with stosb.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_STOS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_STOS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_STOS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_STOS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_STOS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_STOS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_STOS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_STOS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}

#undef IEM_STOS_CASE
5163
/** Macro used by iemOp_lodsb_AL_Xb and iemOp_lodswd_eAX_Xv.
 * Loads an element from iEffSeg:rSI into the low ValBits of rAX and advances
 * (or, with EFLAGS.DF set, retreats) rSI by the element size. */
#define IEM_LODS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(0, 2); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
    IEM_MC_STORE_GREG_U##ValBits(X86_GREG_xAX, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP(); \
    IEM_MC_END();
5179
/**
 * @opcode 0xac
 * LODSB - load the byte at iEffSeg:rSI into AL.  REP/REPNE forms are
 * deferred to the C implementation.
 */
FNIEMOP_DEF(iemOp_lodsb_AL_Xb)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_lodsb_AL_Xb, "rep lodsb AL,Xb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(lodsb_AL_Xb, "lodsb AL,Xb");

    /*
     * Sharing case implementation with lods[wdq] below.
     */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_LODS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_LODS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_LODS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
5215
5216
/**
 * @opcode 0xad
 * LODSW/LODSD/LODSQ - load a word/dword/qword at iEffSeg:rSI into rAX.
 * REP/REPNE forms are deferred to the C implementation.
 */
FNIEMOP_DEF(iemOp_lodswd_eAX_Xv)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_lods_rAX_Xv, "rep lods rAX,Xv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no break - unreachable, every inner case above returns */
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_7); /* 64-bit op with 16-bit addressing cannot be encoded */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(lods_rAX_Xv, "lods rAX,Xv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with lodsb.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_LODS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_LODS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_LODS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_LODS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_LODS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}

#undef IEM_LODS_CASE
5303
/** Macro used by iemOp_scasb_AL_Xb and iemOp_scaswd_eAX_Xv.
 * Compares rAX (low ValBits) with the element at ES:rDI via the cmp worker
 * (EFLAGS only; rAX is not written), then advances/retreats rDI by the
 * element size per EFLAGS.DF. */
#define IEM_SCAS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(3, 2); \
    IEM_MC_ARG(uint##ValBits##_t *, puRax, 0); \
    IEM_MC_ARG(uint##ValBits##_t, uValue, 1); \
    IEM_MC_ARG(uint32_t *, pEFlags, 2); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, X86_SREG_ES, uAddr); \
    IEM_MC_REF_GREG_U##ValBits(puRax, X86_GREG_xAX); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puRax, uValue, pEFlags); \
    \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP(); \
    IEM_MC_END();
5325
/**
 * @opcode 0xae
 *
 * 'scasb AL,Xb' - compare AL against the byte at ES:xDI.  REPE/REPNE forms
 * are deferred to C implementations selected by the effective address size.
 */
FNIEMOP_DEF(iemOp_scasb_AL_Xb)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC(repe_scasb_AL_Xb, "repe scasb AL,Xb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m16);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m32);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC(repone_scasb_AL_Xb, "repne scasb AL,Xb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m16);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m32);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(scasb_AL_Xb, "scasb AL,Xb");

    /*
     * Sharing case implementation with scas[wdq] below.
     */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_SCAS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_SCAS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_SCAS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
5372
5373
5374/**
5375 * @opcode 0xaf
5376 */
5377FNIEMOP_DEF(iemOp_scaswd_eAX_Xv)
5378{
5379 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5380
5381 /*
5382 * Use the C implementation if a repeat prefix is encountered.
5383 */
5384 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
5385 {
5386 IEMOP_MNEMONIC(repe_scas_rAX_Xv, "repe scas rAX,Xv");
5387 switch (pVCpu->iem.s.enmEffOpSize)
5388 {
5389 case IEMMODE_16BIT:
5390 switch (pVCpu->iem.s.enmEffAddrMode)
5391 {
5392 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m16);
5393 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m32);
5394 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m64);
5395 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5396 }
5397 break;
5398 case IEMMODE_32BIT:
5399 switch (pVCpu->iem.s.enmEffAddrMode)
5400 {
5401 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m16);
5402 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m32);
5403 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m64);
5404 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5405 }
5406 case IEMMODE_64BIT:
5407 switch (pVCpu->iem.s.enmEffAddrMode)
5408 {
5409 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /** @todo It's this wrong, we can do 16-bit addressing in 64-bit mode, but not 32-bit. right? */
5410 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m32);
5411 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m64);
5412 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5413 }
5414 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5415 }
5416 }
5417 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
5418 {
5419 IEMOP_MNEMONIC(repne_scas_rAX_Xv, "repne scas rAX,Xv");
5420 switch (pVCpu->iem.s.enmEffOpSize)
5421 {
5422 case IEMMODE_16BIT:
5423 switch (pVCpu->iem.s.enmEffAddrMode)
5424 {
5425 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m16);
5426 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m32);
5427 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m64);
5428 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5429 }
5430 break;
5431 case IEMMODE_32BIT:
5432 switch (pVCpu->iem.s.enmEffAddrMode)
5433 {
5434 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m16);
5435 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m32);
5436 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m64);
5437 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5438 }
5439 case IEMMODE_64BIT:
5440 switch (pVCpu->iem.s.enmEffAddrMode)
5441 {
5442 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_5);
5443 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m32);
5444 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m64);
5445 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5446 }
5447 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5448 }
5449 }
5450 IEMOP_MNEMONIC(scas_rAX_Xv, "scas rAX,Xv");
5451
5452 /*
5453 * Annoying double switch here.
5454 * Using ugly macro for implementing the cases, sharing it with scasb.
5455 */
5456 switch (pVCpu->iem.s.enmEffOpSize)
5457 {
5458 case IEMMODE_16BIT:
5459 switch (pVCpu->iem.s.enmEffAddrMode)
5460 {
5461 case IEMMODE_16BIT: IEM_SCAS_CASE(16, 16); break;
5462 case IEMMODE_32BIT: IEM_SCAS_CASE(16, 32); break;
5463 case IEMMODE_64BIT: IEM_SCAS_CASE(16, 64); break;
5464 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5465 }
5466 break;
5467
5468 case IEMMODE_32BIT:
5469 switch (pVCpu->iem.s.enmEffAddrMode)
5470 {
5471 case IEMMODE_16BIT: IEM_SCAS_CASE(32, 16); break;
5472 case IEMMODE_32BIT: IEM_SCAS_CASE(32, 32); break;
5473 case IEMMODE_64BIT: IEM_SCAS_CASE(32, 64); break;
5474 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5475 }
5476 break;
5477
5478 case IEMMODE_64BIT:
5479 switch (pVCpu->iem.s.enmEffAddrMode)
5480 {
5481 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
5482 case IEMMODE_32BIT: IEM_SCAS_CASE(64, 32); break;
5483 case IEMMODE_64BIT: IEM_SCAS_CASE(64, 64); break;
5484 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5485 }
5486 break;
5487 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5488 }
5489 return VINF_SUCCESS;
5490}
5491
5492#undef IEM_SCAS_CASE
5493
/**
 * Common 'mov r8, imm8' helper.
 *
 * Fetches the imm8 operand and stores it into the given 8-bit register.
 *
 * @param   iReg    The general register index (already REX.B adjusted by the
 *                  callers for opcodes 0xb0..0xb7).
 */
FNIEMOP_DEF_1(iemOpCommonMov_r8_Ib, uint8_t, iReg)
{
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL_CONST(uint8_t, u8Value,/*=*/ u8Imm);
    IEM_MC_STORE_GREG_U8(iReg, u8Value);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
5510
5511
/**
 * @opcode 0xb0
 * 'mov AL,Ib' - stores imm8 into the register selected by xAX | REX.B.
 */
FNIEMOP_DEF(iemOp_mov_AL_Ib)
{
    IEMOP_MNEMONIC(mov_AL_Ib, "mov AL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xAX | pVCpu->iem.s.uRexB);
}
5520
5521
/**
 * @opcode 0xb1
 * 'mov CL,Ib' - stores imm8 into the register selected by xCX | REX.B.
 * (Function name lacks the usual mov_ prefix; referenced by the opcode table.)
 */
FNIEMOP_DEF(iemOp_CL_Ib)
{
    IEMOP_MNEMONIC(mov_CL_Ib, "mov CL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xCX | pVCpu->iem.s.uRexB);
}
5530
5531
/**
 * @opcode 0xb2
 * 'mov DL,Ib' - stores imm8 into the register selected by xDX | REX.B.
 */
FNIEMOP_DEF(iemOp_DL_Ib)
{
    IEMOP_MNEMONIC(mov_DL_Ib, "mov DL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDX | pVCpu->iem.s.uRexB);
}
5540
5541
/**
 * @opcode 0xb3
 * 'mov BL,Ib' - stores imm8 into the register selected by xBX | REX.B.
 */
FNIEMOP_DEF(iemOp_BL_Ib)
{
    IEMOP_MNEMONIC(mov_BL_Ib, "mov BL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBX | pVCpu->iem.s.uRexB);
}
5550
5551
/**
 * @opcode 0xb4
 * 'mov AH,Ib' - register encoding 4 is AH without REX (SPL with REX), hence
 * X86_GREG_xSP here; presumably the U8 register accessors handle the
 * high-byte aliasing.
 */
FNIEMOP_DEF(iemOp_mov_AH_Ib)
{
    IEMOP_MNEMONIC(mov_AH_Ib, "mov AH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP | pVCpu->iem.s.uRexB);
}
5560
5561
/**
 * @opcode 0xb5
 * 'mov CH,Ib' - register encoding 5 is CH without REX (BPL with REX),
 * hence X86_GREG_xBP here.
 */
FNIEMOP_DEF(iemOp_CH_Ib)
{
    IEMOP_MNEMONIC(mov_CH_Ib, "mov CH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBP | pVCpu->iem.s.uRexB);
}
5570
5571
/**
 * @opcode 0xb6
 * 'mov DH,Ib' - register encoding 6 is DH without REX (SIL with REX),
 * hence X86_GREG_xSI here.
 */
FNIEMOP_DEF(iemOp_DH_Ib)
{
    IEMOP_MNEMONIC(mov_DH_Ib, "mov DH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSI | pVCpu->iem.s.uRexB);
}
5580
5581
/**
 * @opcode 0xb7
 * 'mov BH,Ib' - register encoding 7 is BH without REX (DIL with REX),
 * hence X86_GREG_xDI here.
 */
FNIEMOP_DEF(iemOp_BH_Ib)
{
    IEMOP_MNEMONIC(mov_BH_Ib, "mov BH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDI | pVCpu->iem.s.uRexB);
}
5590
5591
5592/**
5593 * Common 'mov regX,immX' helper.
5594 */
5595FNIEMOP_DEF_1(iemOpCommonMov_Rv_Iv, uint8_t, iReg)
5596{
5597 switch (pVCpu->iem.s.enmEffOpSize)
5598 {
5599 case IEMMODE_16BIT:
5600 {
5601 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
5602 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5603
5604 IEM_MC_BEGIN(0, 1);
5605 IEM_MC_LOCAL_CONST(uint16_t, u16Value,/*=*/ u16Imm);
5606 IEM_MC_STORE_GREG_U16(iReg, u16Value);
5607 IEM_MC_ADVANCE_RIP();
5608 IEM_MC_END();
5609 break;
5610 }
5611
5612 case IEMMODE_32BIT:
5613 {
5614 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
5615 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5616
5617 IEM_MC_BEGIN(0, 1);
5618 IEM_MC_LOCAL_CONST(uint32_t, u32Value,/*=*/ u32Imm);
5619 IEM_MC_STORE_GREG_U32(iReg, u32Value);
5620 IEM_MC_ADVANCE_RIP();
5621 IEM_MC_END();
5622 break;
5623 }
5624 case IEMMODE_64BIT:
5625 {
5626 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm); /* 64-bit immediate! */
5627 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5628
5629 IEM_MC_BEGIN(0, 1);
5630 IEM_MC_LOCAL_CONST(uint64_t, u64Value,/*=*/ u64Imm);
5631 IEM_MC_STORE_GREG_U64(iReg, u64Value);
5632 IEM_MC_ADVANCE_RIP();
5633 IEM_MC_END();
5634 break;
5635 }
5636 }
5637
5638 return VINF_SUCCESS;
5639}
5640
5641
/**
 * @opcode 0xb8
 * 'mov rAX,Iv' - stores the operand-size immediate into xAX | REX.B.
 */
FNIEMOP_DEF(iemOp_eAX_Iv)
{
    IEMOP_MNEMONIC(mov_rAX_IV, "mov rAX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xAX | pVCpu->iem.s.uRexB);
}
5650
5651
/**
 * @opcode 0xb9
 * 'mov rCX,Iv' - stores the operand-size immediate into xCX | REX.B.
 */
FNIEMOP_DEF(iemOp_eCX_Iv)
{
    IEMOP_MNEMONIC(mov_rCX_IV, "mov rCX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xCX | pVCpu->iem.s.uRexB);
}
5660
5661
/**
 * @opcode 0xba
 * 'mov rDX,Iv' - stores the operand-size immediate into xDX | REX.B.
 */
FNIEMOP_DEF(iemOp_eDX_Iv)
{
    IEMOP_MNEMONIC(mov_rDX_IV, "mov rDX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDX | pVCpu->iem.s.uRexB);
}
5670
5671
/**
 * @opcode 0xbb
 * 'mov rBX,Iv' - stores the operand-size immediate into xBX | REX.B.
 */
FNIEMOP_DEF(iemOp_eBX_Iv)
{
    IEMOP_MNEMONIC(mov_rBX_IV, "mov rBX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBX | pVCpu->iem.s.uRexB);
}
5680
5681
/**
 * @opcode 0xbc
 * 'mov rSP,Iv' - stores the operand-size immediate into xSP | REX.B.
 */
FNIEMOP_DEF(iemOp_eSP_Iv)
{
    IEMOP_MNEMONIC(mov_rSP_IV, "mov rSP,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSP | pVCpu->iem.s.uRexB);
}
5690
5691
/**
 * @opcode 0xbd
 * 'mov rBP,Iv' - stores the operand-size immediate into xBP | REX.B.
 */
FNIEMOP_DEF(iemOp_eBP_Iv)
{
    IEMOP_MNEMONIC(mov_rBP_IV, "mov rBP,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBP | pVCpu->iem.s.uRexB);
}
5700
5701
/**
 * @opcode 0xbe
 * 'mov rSI,Iv' - stores the operand-size immediate into xSI | REX.B.
 */
FNIEMOP_DEF(iemOp_eSI_Iv)
{
    IEMOP_MNEMONIC(mov_rSI_IV, "mov rSI,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSI | pVCpu->iem.s.uRexB);
}
5710
5711
/**
 * @opcode 0xbf
 * 'mov rDI,Iv' - stores the operand-size immediate into xDI | REX.B.
 */
FNIEMOP_DEF(iemOp_eDI_Iv)
{
    IEMOP_MNEMONIC(mov_rDI_IV, "mov rDI,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDI | pVCpu->iem.s.uRexB);
}
5720
5721
/**
 * @opcode 0xc0
 *
 * Group 2 'rol/ror/rcl/rcr/shl/shr/sar Eb,Ib' - rotate/shift a byte by an
 * imm8 count; /6 is an invalid encoding and raises \#UD.
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_Ib)
{
    IEMOP_HLP_MIN_186(); /* The C0/C1 imm8 shift forms require an 80186 or later. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    /* The ModR/M reg field selects which of the seven shift/rotate operations. */
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Eb_Ib, "rol Eb,Ib"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Eb_Ib, "ror Eb,Ib"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Eb_Ib, "rcl Eb,Ib"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Eb_Ib, "rcr Eb,Ib"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Eb_Ib, "shl Eb,Ib"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Eb_Ib, "shr Eb,Ib"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Eb_Ib, "sar Eb,Ib"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, cShiftArg, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = one immediate byte still follows. */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEM_MC_ASSIGN(cShiftArg, cShift);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Map the destination byte read-write so the helper updates it in place. */
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5783
5784
/**
 * @opcode 0xc1
 *
 * Group 2 'rol/ror/rcl/rcr/shl/shr/sar Ev,Ib' - rotate/shift a
 * word/dword/qword by an imm8 count; /6 is an invalid encoding (\#UD).
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_Ib)
{
    IEMOP_HLP_MIN_186(); /* The C0/C1 imm8 shift forms require an 80186 or later. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    /* The ModR/M reg field selects which of the seven shift/rotate operations. */
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Ev_Ib, "rol Ev,Ib"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Ev_Ib, "ror Ev,Ib"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Ev_Ib, "rcl Ev,Ib"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Ev_Ib, "rcr Ev,Ib"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Ev_Ib, "shl Ev,Ib"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Ev_Ib, "shr Ev,Ib"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Ev_Ib, "sar Ev,Ib"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit register writes clear the upper half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = one immediate byte still follows. */
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
5926
5927
/**
 * @opcode 0xc2
 *
 * 'retn Iw' - near return, popping Iw extra bytes off the stack.
 * NOTE(review): the helper-call order here (done-decoding before the 64-bit
 * default-op-size helper) differs from 0xc3 below - presumably harmless, but
 * worth confirming.
 */
FNIEMOP_DEF(iemOp_retn_Iw)
{
    IEMOP_MNEMONIC(retn_Iw, "retn Iw");
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* near ret defaults to 64-bit operand size in long mode */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pVCpu->iem.s.enmEffOpSize, u16Imm);
}
5939
5940
/**
 * @opcode 0xc3
 *
 * 'retn' - near return (0 extra stack bytes to pop).
 */
FNIEMOP_DEF(iemOp_retn)
{
    IEMOP_MNEMONIC(retn, "retn");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* near ret defaults to 64-bit operand size in long mode */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pVCpu->iem.s.enmEffOpSize, 0);
}
5951
5952
/**
 * @opcode 0xc4
 *
 * 'les Gv,Mp' in legacy/compatibility mode; the two-byte VEX prefix (C4 is
 * actually C5 territory - this is the 2-byte form) when in 64-bit mode or
 * when MOD=3.
 */
FNIEMOP_DEF(iemOp_les_Gv_Mp__vex2)
{
    /* The LES instruction is invalid 64-bit mode. In legacy and
       compatibility mode it is invalid with MOD=3.
       The use as a VEX prefix is made possible by assigning the inverted
       REX.R to the top MOD bit, and the top bit in the inverted register
       specifier to the bottom MOD bit, thereby effectively limiting 32-bit
       to accessing registers 0..7 in this VEX form. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (   pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
        || (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC(vex2_prefix, "vex2");
        if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx)
        {
            uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
            /* VEX may not be mixed with 66/F3/F2/LOCK/REX prefixes. */
            if (   (   pVCpu->iem.s.fPrefixes
                    & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_LOCK | IEM_OP_PRF_REX))
                == 0)
            {
                pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
                /* Unpack the single VEX payload byte: R (inverted), vvvv (inverted), L, pp. */
                pVCpu->iem.s.uRexReg    = ~bRm >> (7 - 3);
                pVCpu->iem.s.uVex3rdReg = (~bRm >> 3) & 0xf;
                pVCpu->iem.s.uVexLength = (bRm >> 2) & 1;
                pVCpu->iem.s.idxPrefix  = bRm & 0x3;

                /* The 2-byte form always implies the 0x0f opcode map. */
                return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
            }

            Log(("VEX2: Invalid prefix mix!\n"));
        }
        else
            Log(("VEX2: AVX support disabled!\n"));

        /** @todo does intel completely decode the sequence with SIB/disp before \#UD? */
        return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_MNEMONIC(les_Gv_Mp, "les Gv,Mp");
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_ES, bRm);
}
5996
5997
5998/**
5999 * @opcode 0xc5
6000 */
6001FNIEMOP_DEF(iemOp_lds_Gv_Mp__vex3)
6002{
6003 /* The LDS instruction is invalid 64-bit mode. In legacy and
6004 compatability mode it is invalid with MOD=3.
6005 The use as a VEX prefix is made possible by assigning the inverted
6006 REX.R and REX.X to the two MOD bits, since the REX bits are ignored
6007 outside of 64-bit mode. VEX is not available in real or v86 mode. */
6008 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6009 if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
6010 {
6011 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
6012 {
6013 IEMOP_MNEMONIC(lds_Gv_Mp, "lds Gv,Mp");
6014 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_DS, bRm);
6015 }
6016 IEMOP_HLP_NO_REAL_OR_V86_MODE();
6017 }
6018
6019 IEMOP_MNEMONIC(vex3_prefix, "vex3");
6020 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx)
6021 {
6022 /** @todo Test when exctly the VEX conformance checks kick in during
6023 * instruction decoding and fetching (using \#PF). */
6024 uint8_t bVex2; IEM_OPCODE_GET_NEXT_U8(&bVex2);
6025 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
6026 if ( ( pVCpu->iem.s.fPrefixes
6027 & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_LOCK | IEM_OP_PRF_REX))
6028 == 0)
6029 {
6030 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
6031 if (bVex2 & 0x80 /* VEX.W */)
6032 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
6033 pVCpu->iem.s.uRexReg = ~bRm >> (7 - 3);
6034 pVCpu->iem.s.uRexIndex = ~bRm >> (6 - 3);
6035 pVCpu->iem.s.uRexB = ~bRm >> (5 - 3);
6036 pVCpu->iem.s.uVex3rdReg = (~bVex2 >> 3) & 0xf;
6037 pVCpu->iem.s.uVexLength = (bVex2 >> 2) & 1;
6038 pVCpu->iem.s.idxPrefix = bVex2 & 0x3;
6039
6040 switch (bRm & 0x1f)
6041 {
6042 case 1: /* 0x0f lead opcode byte. */
6043 return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
6044
6045 case 2: /* 0x0f 0x38 lead opcode bytes. */
6046 /** @todo VEX: Just use new tables and decoders. */
6047 IEMOP_BITCH_ABOUT_STUB();
6048 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
6049
6050 case 3: /* 0x0f 0x3a lead opcode bytes. */
6051 /** @todo VEX: Just use new tables and decoders. */
6052 IEMOP_BITCH_ABOUT_STUB();
6053 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
6054
6055 default:
6056 Log(("VEX3: Invalid vvvv value: %#x!\n", bRm & 0x1f));
6057 return IEMOP_RAISE_INVALID_OPCODE();
6058 }
6059 }
6060 else
6061 Log(("VEX3: Invalid prefix mix!\n"));
6062 }
6063 else
6064 Log(("VEX3: AVX support disabled!\n"));
6065 return IEMOP_RAISE_INVALID_OPCODE();
6066}
6067
6068
/**
 * @opcode 0xc6
 *
 * Group 11 'mov Eb,Ib' - only /0 is defined; the other reg-field encodings
 * raise \#UD.
 */
FNIEMOP_DEF(iemOp_Grp11_Eb_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
        return IEMOP_RAISE_INVALID_OPCODE();
    IEMOP_MNEMONIC(mov_Eb_Ib, "mov Eb,Ib");

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u8Imm);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = one immediate byte still follows. */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Imm);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6103
6104
/**
 * @opcode 0xc7
 *
 * Group 11 'mov Ev,Iz' - only /0 is defined; the other reg-field encodings
 * raise \#UD.  The 64-bit form sign-extends a 32-bit immediate.
 */
FNIEMOP_DEF(iemOp_Grp11_Ev_Iz)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Ev,Iz in this group. */
        return IEMOP_RAISE_INVALID_OPCODE();
    IEMOP_MNEMONIC(mov_Ev_Iz, "mov Ev,Iz");

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 0);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 0);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 0);
                /* imm32 sign-extended to 64 bits - there is no mov r/m64,imm64 form. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); /* 2 = two immediate bytes still follow. */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* 4 = four immediate bytes still follow. */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* still only 4 immediate bytes (imm32 sign-extended). */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6192
6193
6194
6195
/**
 * @opcode 0xc8
 *
 * 'enter Iw,Ib' - set up a stack frame of Iw bytes at nesting level Ib;
 * deferred to the C implementation.
 */
FNIEMOP_DEF(iemOp_enter_Iw_Ib)
{
    IEMOP_MNEMONIC(enter_Iw_Ib, "enter Iw,Ib");
    IEMOP_HLP_MIN_186(); /* ENTER requires an 80186 or later. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    uint16_t cbFrame;        IEM_OPCODE_GET_NEXT_U16(&cbFrame);
    uint8_t  u8NestingLevel; IEM_OPCODE_GET_NEXT_U8(&u8NestingLevel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_enter, pVCpu->iem.s.enmEffOpSize, cbFrame, u8NestingLevel);
}
6209
6210
/**
 * @opcode 0xc9
 *
 * 'leave' - tear down the stack frame set up by ENTER; deferred to the C
 * implementation.
 */
FNIEMOP_DEF(iemOp_leave)
{
    IEMOP_MNEMONIC(leave, "leave");
    IEMOP_HLP_MIN_186(); /* LEAVE requires an 80186 or later. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_leave, pVCpu->iem.s.enmEffOpSize);
}
6222
6223
/**
 * @opcode 0xca
 *
 * 'retf Iw' - far return, popping Iw extra bytes off the stack; deferred to
 * the C implementation.
 */
FNIEMOP_DEF(iemOp_retf_Iw)
{
    IEMOP_MNEMONIC(retf_Iw, "retf Iw");
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, u16Imm);
}
6235
6236
/**
 * @opcode 0xcb
 *
 * 'retf' - far return (0 extra stack bytes to pop); deferred to the C
 * implementation.
 */
FNIEMOP_DEF(iemOp_retf)
{
    IEMOP_MNEMONIC(retf, "retf");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, 0);
}
6247
6248
/**
 * @opcode 0xcc
 *
 * 'int3' - breakpoint: raises \#BP via the common software interrupt C
 * implementation, flagged as the dedicated breakpoint instruction.
 */
FNIEMOP_DEF(iemOp_int3)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_BP, true /*fIsBpInstr*/);
}
6257
6258
/**
 * @opcode 0xcd
 *
 * 'int Ib' - software interrupt with the vector given by imm8; deferred to
 * the common software interrupt C implementation.
 */
FNIEMOP_DEF(iemOp_int_Ib)
{
    uint8_t u8Int; IEM_OPCODE_GET_NEXT_U8(&u8Int);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, u8Int, false /*fIsBpInstr*/);
}
6268
6269
/**
 * @opcode 0xce
 *
 * 'into' - raise \#OF if EFLAGS.OF is set (the conditional check is done by
 * iemCImpl_int).  Not available in 64-bit mode (IEMOP_HLP_NO_64BIT).
 */
FNIEMOP_DEF(iemOp_into)
{
    IEMOP_MNEMONIC(into, "into");
    IEMOP_HLP_NO_64BIT();

    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG_CONST(uint8_t, u8Int, /*=*/ X86_XCPT_OF, 0);
    IEM_MC_ARG_CONST(bool, fIsBpInstr, /*=*/ false, 1);
    IEM_MC_CALL_CIMPL_2(iemCImpl_int, u8Int, fIsBpInstr);
    IEM_MC_END();
    return VINF_SUCCESS;
}
6285
6286
/**
 * @opcode 0xcf
 *
 * 'iret' - interrupt return; deferred to the C implementation with the
 * effective operand size.
 */
FNIEMOP_DEF(iemOp_iret)
{
    IEMOP_MNEMONIC(iret, "iret");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_iret, pVCpu->iem.s.enmEffOpSize);
}
6296
6297
/**
 * @opcode 0xd0
 *
 * Group 2 'rol/ror/rcl/rcr/shl/shr/sar Eb,1' - rotate/shift a byte by a
 * fixed count of one; /6 is an invalid encoding (\#UD).
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    /* The ModR/M reg field selects which of the seven shift/rotate operations. */
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Eb_1, "rol Eb,1"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Eb_1, "ror Eb,1"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Eb_1, "rcl Eb,1"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Eb_1, "rcr Eb,1"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Eb_1, "shl Eb,1"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Eb_1, "shr Eb,1"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Eb_1, "sar Eb,1"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); /* 0 = no immediate bytes follow. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Map the destination byte read-write so the helper updates it in place. */
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6355
6356
6357
/**
 * @opcode 0xd1
 *
 * Group 2: rotate/shift r/m16/32/64 by an implicit count of 1.
 * The /reg field of the ModR/M byte selects the operation; /6 is undefined.
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Ev_1, "rol Ev,1"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Ev_1, "ror Ev,1"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Ev_1, "rcl Ev,1"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Ev_1, "rcr Ev,1"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Ev_1, "shl Ev,1"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Ev_1, "shr Ev,1"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Ev_1, "sar Ev,1"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE(); /* /6 is undefined for group 2 */
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
    /* OF and AF are left undefined by (some of) these operations. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register - one IEM_MC block per effective operand size */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit ops zero the upper half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory - read-modify-write of the destination operand */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,      pu16Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,        2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,      pu32Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,        2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,      pu64Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,        2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6491
6492
/**
 * @opcode 0xd2
 *
 * Group 2: rotate/shift r/m8 by CL.
 * The /reg field of the ModR/M byte selects the operation; /6 is undefined.
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Eb_CL, "rol Eb,CL"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Eb_CL, "ror Eb,CL"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Eb_CL, "rcl Eb,CL"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Eb_CL, "rcr Eb,CL"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Eb_CL, "shl Eb,CL"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Eb_CL, "shr Eb,CL"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Eb_CL, "sar Eb,CL"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE(); /* /6 is undefined for group 2 */
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc, grr. */
    }
    /* OF and AF are left undefined by (some of) these operations. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,   pu8Dst,    0);
        IEM_MC_ARG(uint8_t,     cShiftArg, 1); /* shift count fetched from CL below */
        IEM_MC_ARG(uint32_t *,  pEFlags,   2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,   pu8Dst,          0);
        IEM_MC_ARG(uint8_t,     cShiftArg,       1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6552
6553
/**
 * @opcode 0xd3
 *
 * Group 2: rotate/shift r/m16/32/64 by CL.
 * The /reg field of the ModR/M byte selects the operation; /6 is undefined.
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Ev_CL, "rol Ev,CL"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Ev_CL, "ror Ev,CL"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Ev_CL, "rcl Ev,CL"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Ev_CL, "rcr Ev,CL"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Ev_CL, "shl Ev,CL"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Ev_CL, "shr Ev,CL"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Ev_CL, "sar Ev,CL"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE(); /* /6 is undefined for group 2 */
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    /* OF and AF are left undefined by (some of) these operations. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register - one IEM_MC block per effective operand size */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,  pu16Dst,    0);
                IEM_MC_ARG(uint8_t,     cShiftArg,  1); /* shift count from CL */
                IEM_MC_ARG(uint32_t *,  pEFlags,    2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,  pu32Dst,    0);
                IEM_MC_ARG(uint8_t,     cShiftArg,  1);
                IEM_MC_ARG(uint32_t *,  pEFlags,    2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit ops zero the upper half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,  pu64Dst,    0);
                IEM_MC_ARG(uint8_t,     cShiftArg,  1);
                IEM_MC_ARG(uint32_t *,  pEFlags,    2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory - read-modify-write of the destination operand */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,  pu16Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,  pu32Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,  pu64Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6693
/**
 * @opcode 0xd4
 *
 * ASCII adjust AX after multiply.  The immediate is the divisor (10 for the
 * canonical "aam" encoding, but any base works).
 */
FNIEMOP_DEF(iemOp_aam_Ib)
{
    IEMOP_MNEMONIC(aam_Ib, "aam Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT(); /* invalid in 64-bit mode */
    if (!bImm)
        return IEMOP_RAISE_DIVIDE_ERROR(); /* aam 0 raises #DE like a division by zero */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aam, bImm);
}
6707
6708
/**
 * @opcode 0xd5
 *
 * ASCII adjust AX before divide.  The immediate is the base (10 for the
 * canonical "aad" encoding).  Unlike aam, a zero immediate is legal.
 */
FNIEMOP_DEF(iemOp_aad_Ib)
{
    IEMOP_MNEMONIC(aad_Ib, "aad Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT(); /* invalid in 64-bit mode */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aad, bImm);
}
6720
6721
6722/**
6723 * @opcode 0xd6
6724 */
6725FNIEMOP_DEF(iemOp_salc)
6726{
6727 IEMOP_MNEMONIC(salc, "salc");
6728 IEMOP_HLP_MIN_286(); /* (undocument at the time) */
6729 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6730 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6731 IEMOP_HLP_NO_64BIT();
6732
6733 IEM_MC_BEGIN(0, 0);
6734 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
6735 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0xff);
6736 } IEM_MC_ELSE() {
6737 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0x00);
6738 } IEM_MC_ENDIF();
6739 IEM_MC_ADVANCE_RIP();
6740 IEM_MC_END();
6741 return VINF_SUCCESS;
6742}
6743
6744
/**
 * @opcode 0xd7
 *
 * Table lookup: AL = [rBX + zero-extended AL], using the effective segment
 * (DS unless overridden).  One IEM_MC block per effective address size; the
 * 16/32-bit variants use address-size wrapping memory fetches.
 */
FNIEMOP_DEF(iemOp_xlat)
{
    IEMOP_MNEMONIC(xlat, "xlat");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint16_t, u16Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U16(u16Addr, X86_GREG_xAX); /* zero-extend AL */
            IEM_MC_ADD_GREG_U16_TO_LOCAL(u16Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM16_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u16Addr); /* 16-bit addr wrap */
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint32_t, u32Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U32(u32Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U32_TO_LOCAL(u32Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM32_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u32Addr); /* 32-bit addr wrap */
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint64_t, u64Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U64(u64Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U64_TO_LOCAL(u64Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u64Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6793
6794
/**
 * Common worker for FPU instructions working on ST0 and STn, and storing the
 * result in ST0.
 *
 * Raises stack underflow in ST0 if either register is empty.
 *
 * @param   bRm         The ModR/M byte; the low three bits select STn.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,     FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,          1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value2,          2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    /* Only call the assembly worker if both ST0 and STn hold values. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0); /* result goes to ST0 */
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
6825
6826
/**
 * Common worker for FPU instructions working on ST0 and STn, and only affecting
 * flags.
 *
 * Only FSW is updated (via the assembly worker); no register is written.
 *
 * @param   bRm         The ModR/M byte; the low three bits select STn.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(uint16_t,          u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U,        pr80Value1,         1);
    IEM_MC_ARG(PCRTFLOAT80U,        pr80Value2,         2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW(u16Fsw);
    IEM_MC_ELSE()
        /* UINT8_MAX = no destination register to mark empty/invalid. */
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
6857
6858
/**
 * Common worker for FPU instructions working on ST0 and STn, only affecting
 * flags, and popping when done.
 *
 * Same as iemOpHlpFpuNoStore_st0_stN except the stack is popped after the
 * FSW update (or underflow handling).
 *
 * @param   bRm         The ModR/M byte; the low three bits select STn.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(uint16_t,          u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U,        pr80Value1,         1);
    IEM_MC_ARG(PCRTFLOAT80U,        pr80Value2,         2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
    IEM_MC_ELSE()
        /* UINT8_MAX = no destination register to mark empty/invalid. */
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(UINT8_MAX);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
6889
6890
/** Opcode 0xd8 11/0 - fadd st0,stN: trivial wrapper over the common worker. */
FNIEMOP_DEF_1(iemOp_fadd_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_stN, "fadd st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fadd_r80_by_r80);
}


/** Opcode 0xd8 11/1 - fmul st0,stN. */
FNIEMOP_DEF_1(iemOp_fmul_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_stN, "fmul st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fmul_r80_by_r80);
}


/** Opcode 0xd8 11/2 - fcom st0,stN: compare, only FSW is affected. */
FNIEMOP_DEF_1(iemOp_fcom_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_stN, "fcom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fcom_r80_by_r80);
}


/** Opcode 0xd8 11/3 - fcomp st0,stN: same as fcom but pops afterwards
 *  (reuses the fcom assembly worker). */
FNIEMOP_DEF_1(iemOp_fcomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_stN, "fcomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fcom_r80_by_r80);
}


/** Opcode 0xd8 11/4 - fsub st0,stN. */
FNIEMOP_DEF_1(iemOp_fsub_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_stN, "fsub st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsub_r80_by_r80);
}


/** Opcode 0xd8 11/5 - fsubr st0,stN (reversed operand order). */
FNIEMOP_DEF_1(iemOp_fsubr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_stN, "fsubr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsubr_r80_by_r80);
}


/** Opcode 0xd8 11/6 - fdiv st0,stN. */
FNIEMOP_DEF_1(iemOp_fdiv_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_stN, "fdiv st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdiv_r80_by_r80);
}


/** Opcode 0xd8 11/7 - fdivr st0,stN (reversed operand order). */
FNIEMOP_DEF_1(iemOp_fdivr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_stN, "fdivr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdivr_r80_by_r80);
}
6953
6954
/**
 * Common worker for FPU instructions working on ST0 and an m32r, and storing
 * the result in ST0.
 *
 * Fetches the 32-bit real from memory, then calls the worker with ST0 and the
 * fetched value; the result replaces ST0.  Raises underflow if ST0 is empty.
 *
 * @param   bRm         The ModR/M byte (memory form).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32r, uint8_t, bRm, PFNIEMAIMPLFPUR32, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U,            r32Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,     FpuRes,  0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,           1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U,  pr32Val2,    r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr32Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0); /* result goes to ST0 */
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
6990
6991
/** Opcode 0xd8 !11/0 - fadd st0,m32r: trivial wrapper over the m32r worker. */
FNIEMOP_DEF_1(iemOp_fadd_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_m32r, "fadd st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fadd_r80_by_r32);
}


/** Opcode 0xd8 !11/1 - fmul st0,m32r. */
FNIEMOP_DEF_1(iemOp_fmul_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_m32r, "fmul st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fmul_r80_by_r32);
}
7006
7007
/** Opcode 0xd8 !11/2 - fcom st0,m32r.
 *
 * Compares ST0 with a 32-bit real from memory; only FSW is updated, so this
 * cannot reuse iemOpHlpFpu_st0_m32r (which stores a result in ST0).
 */
FNIEMOP_DEF_1(iemOp_fcom_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m32r, "fcom st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U,            r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,  0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,          1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U,  pr32Val2,   r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        /* Record the memory operand info (FDP/FDS) along with the FSW update. */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7040
7041
/** Opcode 0xd8 !11/3 - fcomp st0,m32r.
 *
 * Same as fcom st0,m32r, but pops the FPU stack after updating FSW.
 */
FNIEMOP_DEF_1(iemOp_fcomp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m32r, "fcomp st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U,            r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,  0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,          1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U,  pr32Val2,   r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7074
7075
/** Opcode 0xd8 !11/4 - fsub st0,m32r: trivial wrapper over the m32r worker. */
FNIEMOP_DEF_1(iemOp_fsub_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_m32r, "fsub st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsub_r80_by_r32);
}


/** Opcode 0xd8 !11/5 - fsubr st0,m32r (reversed operand order). */
FNIEMOP_DEF_1(iemOp_fsubr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_m32r, "fsubr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsubr_r80_by_r32);
}


/** Opcode 0xd8 !11/6 - fdiv st0,m32r. */
FNIEMOP_DEF_1(iemOp_fdiv_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_m32r, "fdiv st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdiv_r80_by_r32);
}


/** Opcode 0xd8 !11/7 - fdivr st0,m32r (reversed operand order). */
FNIEMOP_DEF_1(iemOp_fdivr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_m32r, "fdivr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdivr_r80_by_r32);
}
7106
7107
/**
 * @opcode 0xd8
 *
 * FPU escape 0: dispatches on the ModR/M /reg field; register form (mod=3)
 * operates on st0,stN, memory form on st0 and a 32-bit real.
 */
FNIEMOP_DEF(iemOp_EscF0)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FPU opcode (FOP) from the low 3 opcode bits and the ModR/M byte. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd8 & 0x7);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register form: st0,stN */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_stN,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_stN,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_stN, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory form: st0,m32r */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m32r,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m32r,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m32r,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m32r,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m32r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m32r,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m32r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
7147
7148
/** Opcode 0xd9 /0 mem32real
 *
 * Loads a 32-bit real from memory, converts it to 80-bit, and pushes it onto
 * the FPU stack.  Raises stack overflow if the target register is in use.
 * @sa iemOp_fld_m64r */
FNIEMOP_DEF_1(iemOp_fld_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m32r, "fld m32r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U,            r32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U,  pr32Val, r32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Register 7 relative to TOP is the one that becomes ST0 after the push. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r32_to_r80, pFpuRes, pr32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7181
7182
/** Opcode 0xd9 !11/2 mem32real
 *
 * Stores ST0 to memory as a 32-bit real.  If ST0 is empty and the invalid-
 * operation exception is masked (FCW.IM), a negative QNaN is stored instead.
 */
FNIEMOP_DEF_1(iemOp_fst_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m32r, "fst m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U,             pr32Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing before touching the FPU state. */
    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        /* Commit only happens when the FSW doesn't indicate an unmasked exception. */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            /* Masked underflow: store the indefinite (negative QNaN) value. */
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7217
7218
/** Opcode 0xd9 !11/3
 *
 * Same as fst m32r, but pops the FPU stack after the store / underflow
 * handling.
 */
FNIEMOP_DEF_1(iemOp_fstp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m32r, "fstp m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U,             pr32Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            /* Masked underflow: store the indefinite (negative QNaN) value. */
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7253
7254
/** Opcode 0xd9 !11/4
 *
 * Loads the FPU environment (14 or 28 bytes depending on operand size) from
 * memory; the heavy lifting is deferred to the C implementation.
 */
FNIEMOP_DEF_1(iemOp_fldenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fldenv, "fldenv m14/28byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE,           enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize,  0);
    IEM_MC_ARG(uint8_t,                 iEffSeg,                                        1);
    IEM_MC_ARG(RTGCPTR,                 GCPtrEffSrc,                                    2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fldenv, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
7272
7273
7274/** Opcode 0xd9 !11/5 */
7275FNIEMOP_DEF_1(iemOp_fldcw, uint8_t, bRm)
7276{
7277 IEMOP_MNEMONIC(fldcw_m2byte, "fldcw m2byte");
7278 IEM_MC_BEGIN(1, 1);
7279 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7280 IEM_MC_ARG(uint16_t, u16Fsw, 0);
7281 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7282 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7283 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7284 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7285 IEM_MC_FETCH_MEM_U16(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
7286 IEM_MC_CALL_CIMPL_1(iemCImpl_fldcw, u16Fsw);
7287 IEM_MC_END();
7288 return VINF_SUCCESS;
7289}
7290
7291
/** Opcode 0xd9 !11/6 - FNSTENV m14/28byte.
 * Stores the FPU environment to memory without checking for pending
 * exceptions first (the no-wait form); layout depends on the effective
 * operand size, handled by iemCImpl_fnstenv. */
FNIEMOP_DEF_1(iemOp_fnstenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstenv, "fstenv m14/m28byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fnstenv, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}
7309
7310
/** Opcode 0xd9 !11/7 - FNSTCW m2byte.
 * Stores the current FPU control word to a 16-bit memory operand (no-wait
 * form, so no pending-exception check beyond the usual \#NM test). */
FNIEMOP_DEF_1(iemOp_fnstcw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstcw_m2byte, "fnstcw m2byte");
    IEM_MC_BEGIN(2, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fcw);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FCW(u16Fcw);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Fcw);
    IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
    return VINF_SUCCESS;
}
7328
7329
/** Opcode 0xd9 0xd0, 0xd9 0xd8-0xdf, ++?.
 * FNOP - performs no operation on the FPU data registers, but still raises
 * \#NM/\#MF as appropriate and updates the FPU opcode/instruction pointer. */
FNIEMOP_DEF(iemOp_fnop)
{
    IEMOP_MNEMONIC(fnop, "fnop");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /** @todo Testcase: looks like FNOP leaves FOP alone but updates FPUIP. Could be
     *        intel optimizations. Investigate. */
    IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
    return VINF_SUCCESS;
}
7347
7348
/** Opcode 0xd9 11/0 stN - FLD ST(i).
 * Pushes a copy of ST(i) onto the register stack; signals stack underflow if
 * the source register is empty. */
FNIEMOP_DEF_1(iemOp_fld_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_stN, "fld stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF?  Intel mentioned it not. AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* bRm & X86_MODRM_RM_MASK is the source register index i in ST(i). */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, bRm & X86_MODRM_RM_MASK)
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_PUSH_FPU_RESULT(FpuRes);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW();
    IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
7376
7377
/** Opcode 0xd9 11/3 stN - FXCH ST(i).
 * Exchanges the contents of ST(0) and ST(i).  C1 is set in the resulting FSW
 * on success; the empty-register case is delegated to
 * iemCImpl_fxch_underflow. */
FNIEMOP_DEF_1(iemOp_fxch_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fxch_stN, "fxch stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF?  Intel mentioned it not. AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(1, 3);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value2);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_CONST(uint8_t, iStReg, /*=*/ bRm & X86_MODRM_RM_MASK, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        /* Swap: old ST(i) goes to ST(0) via FpuRes, old ST(0) goes to ST(i). */
        IEM_MC_SET_FPU_RESULT(FpuRes, X86_FSW_C1, pr80Value2);
        IEM_MC_STORE_FPUREG_R80_SRC_REF(bRm & X86_MODRM_RM_MASK, pr80Value1);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_CALL_CIMPL_1(iemCImpl_fxch_underflow, iStReg);
    IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
7408
7409
/** Opcode 0xd9 11/4, 0xdd 11/2 - FSTP ST(i).
 * Copies ST(0) to ST(i) and pops the register stack.  The ST(0),ST(0) form is
 * special-cased: it is commonly used as an 'ffreep st0' idiom, so no copy is
 * performed, only the empty check, FSW update and pop. */
FNIEMOP_DEF_1(iemOp_fstp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_st0_stN, "fstp st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* fstp st0, st0 is frequently used as an official 'ffreep st0' sequence. */
    uint8_t const iDstReg = bRm & X86_MODRM_RM_MASK;
    if (!iDstReg)
    {
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL_CONST(uint16_t, u16Fsw, /*=*/ 0);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY(0)
            IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
        IEM_MC_ELSE()
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(0);
        IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* General case: propagate ST(0) into ST(i), then pop. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
        IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
            IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
            IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, iDstReg);
        IEM_MC_ELSE()
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(iDstReg);
        IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
7456
7457
7458/**
7459 * Common worker for FPU instructions working on ST0 and replaces it with the
7460 * result, i.e. unary operators.
7461 *
7462 * @param pfnAImpl Pointer to the instruction implementation (assembly).
7463 */
7464FNIEMOP_DEF_1(iemOpHlpFpu_st0, PFNIEMAIMPLFPUR80UNARY, pfnAImpl)
7465{
7466 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7467
7468 IEM_MC_BEGIN(2, 1);
7469 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
7470 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
7471 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
7472
7473 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7474 IEM_MC_MAYBE_RAISE_FPU_XCPT();
7475 IEM_MC_PREPARE_FPU_USAGE();
7476 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
7477 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuRes, pr80Value);
7478 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
7479 IEM_MC_ELSE()
7480 IEM_MC_FPU_STACK_UNDERFLOW(0);
7481 IEM_MC_ENDIF();
7482 IEM_MC_ADVANCE_RIP();
7483
7484 IEM_MC_END();
7485 return VINF_SUCCESS;
7486}
7487
7488
/** Opcode 0xd9 0xe0 - FCHS: negate the sign of ST(0). */
FNIEMOP_DEF(iemOp_fchs)
{
    IEMOP_MNEMONIC(fchs_st0, "fchs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fchs_r80);
}


/** Opcode 0xd9 0xe1 - FABS: clear the sign of ST(0). */
FNIEMOP_DEF(iemOp_fabs)
{
    IEMOP_MNEMONIC(fabs_st0, "fabs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fabs_r80);
}
7503
7504
7505/**
7506 * Common worker for FPU instructions working on ST0 and only returns FSW.
7507 *
7508 * @param pfnAImpl Pointer to the instruction implementation (assembly).
7509 */
7510FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0, PFNIEMAIMPLFPUR80UNARYFSW, pfnAImpl)
7511{
7512 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7513
7514 IEM_MC_BEGIN(2, 1);
7515 IEM_MC_LOCAL(uint16_t, u16Fsw);
7516 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
7517 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
7518
7519 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7520 IEM_MC_MAYBE_RAISE_FPU_XCPT();
7521 IEM_MC_PREPARE_FPU_USAGE();
7522 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
7523 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pu16Fsw, pr80Value);
7524 IEM_MC_UPDATE_FSW(u16Fsw);
7525 IEM_MC_ELSE()
7526 IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
7527 IEM_MC_ENDIF();
7528 IEM_MC_ADVANCE_RIP();
7529
7530 IEM_MC_END();
7531 return VINF_SUCCESS;
7532}
7533
7534
/** Opcode 0xd9 0xe4 - FTST: compare ST(0) against 0.0, setting C0-C3 only. */
FNIEMOP_DEF(iemOp_ftst)
{
    IEMOP_MNEMONIC(ftst_st0, "ftst st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_ftst_r80);
}


/** Opcode 0xd9 0xe5 - FXAM: classify ST(0) into the C0-C3 condition bits. */
FNIEMOP_DEF(iemOp_fxam)
{
    IEMOP_MNEMONIC(fxam_st0, "fxam st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_fxam_r80);
}
7549
7550
7551/**
7552 * Common worker for FPU instructions pushing a constant onto the FPU stack.
7553 *
7554 * @param pfnAImpl Pointer to the instruction implementation (assembly).
7555 */
7556FNIEMOP_DEF_1(iemOpHlpFpuPushConstant, PFNIEMAIMPLFPUR80LDCONST, pfnAImpl)
7557{
7558 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7559
7560 IEM_MC_BEGIN(1, 1);
7561 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
7562 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
7563
7564 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7565 IEM_MC_MAYBE_RAISE_FPU_XCPT();
7566 IEM_MC_PREPARE_FPU_USAGE();
7567 IEM_MC_IF_FPUREG_IS_EMPTY(7)
7568 IEM_MC_CALL_FPU_AIMPL_1(pfnAImpl, pFpuRes);
7569 IEM_MC_PUSH_FPU_RESULT(FpuRes);
7570 IEM_MC_ELSE()
7571 IEM_MC_FPU_STACK_PUSH_OVERFLOW();
7572 IEM_MC_ENDIF();
7573 IEM_MC_ADVANCE_RIP();
7574
7575 IEM_MC_END();
7576 return VINF_SUCCESS;
7577}
7578
7579
/** Opcode 0xd9 0xe8 - FLD1: push +1.0. */
FNIEMOP_DEF(iemOp_fld1)
{
    IEMOP_MNEMONIC(fld1, "fld1");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fld1);
}


/** Opcode 0xd9 0xe9 - FLDL2T: push log2(10). */
FNIEMOP_DEF(iemOp_fldl2t)
{
    IEMOP_MNEMONIC(fldl2t, "fldl2t");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2t);
}


/** Opcode 0xd9 0xea - FLDL2E: push log2(e). */
FNIEMOP_DEF(iemOp_fldl2e)
{
    IEMOP_MNEMONIC(fldl2e, "fldl2e");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2e);
}

/** Opcode 0xd9 0xeb - FLDPI: push pi. */
FNIEMOP_DEF(iemOp_fldpi)
{
    IEMOP_MNEMONIC(fldpi, "fldpi");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldpi);
}


/** Opcode 0xd9 0xec - FLDLG2: push log10(2). */
FNIEMOP_DEF(iemOp_fldlg2)
{
    IEMOP_MNEMONIC(fldlg2, "fldlg2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldlg2);
}

/** Opcode 0xd9 0xed - FLDLN2: push ln(2). */
FNIEMOP_DEF(iemOp_fldln2)
{
    IEMOP_MNEMONIC(fldln2, "fldln2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldln2);
}


/** Opcode 0xd9 0xee - FLDZ: push +0.0. */
FNIEMOP_DEF(iemOp_fldz)
{
    IEMOP_MNEMONIC(fldz, "fldz");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldz);
}


/** Opcode 0xd9 0xf0 - F2XM1: replace ST(0) with 2^ST(0) - 1. */
FNIEMOP_DEF(iemOp_f2xm1)
{
    IEMOP_MNEMONIC(f2xm1_st0, "f2xm1 st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_f2xm1_r80);
}
7640
7641
7642/**
7643 * Common worker for FPU instructions working on STn and ST0, storing the result
7644 * in STn, and popping the stack unless IE, DE or ZE was raised.
7645 *
7646 * @param pfnAImpl Pointer to the instruction implementation (assembly).
7647 */
7648FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
7649{
7650 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7651
7652 IEM_MC_BEGIN(3, 1);
7653 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
7654 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
7655 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
7656 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
7657
7658 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7659 IEM_MC_MAYBE_RAISE_FPU_XCPT();
7660
7661 IEM_MC_PREPARE_FPU_USAGE();
7662 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
7663 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
7664 IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, bRm & X86_MODRM_RM_MASK);
7665 IEM_MC_ELSE()
7666 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(bRm & X86_MODRM_RM_MASK);
7667 IEM_MC_ENDIF();
7668 IEM_MC_ADVANCE_RIP();
7669
7670 IEM_MC_END();
7671 return VINF_SUCCESS;
7672}
7673
7674
/** Opcode 0xd9 0xf1 - FYL2X: ST(1) = ST(1) * log2(ST(0)), then pop. */
FNIEMOP_DEF(iemOp_fyl2x)
{
    IEMOP_MNEMONIC(fyl2x_st0, "fyl2x st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2x_r80_by_r80);
}
7681
7682
7683/**
7684 * Common worker for FPU instructions working on ST0 and having two outputs, one
7685 * replacing ST0 and one pushed onto the stack.
7686 *
7687 * @param pfnAImpl Pointer to the instruction implementation (assembly).
7688 */
7689FNIEMOP_DEF_1(iemOpHlpFpuReplace_st0_push, PFNIEMAIMPLFPUR80UNARYTWO, pfnAImpl)
7690{
7691 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7692
7693 IEM_MC_BEGIN(2, 1);
7694 IEM_MC_LOCAL(IEMFPURESULTTWO, FpuResTwo);
7695 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULTTWO, pFpuResTwo, FpuResTwo, 0);
7696 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
7697
7698 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7699 IEM_MC_MAYBE_RAISE_FPU_XCPT();
7700 IEM_MC_PREPARE_FPU_USAGE();
7701 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
7702 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuResTwo, pr80Value);
7703 IEM_MC_PUSH_FPU_RESULT_TWO(FpuResTwo);
7704 IEM_MC_ELSE()
7705 IEM_MC_FPU_STACK_PUSH_UNDERFLOW_TWO();
7706 IEM_MC_ENDIF();
7707 IEM_MC_ADVANCE_RIP();
7708
7709 IEM_MC_END();
7710 return VINF_SUCCESS;
7711}
7712
7713
/** Opcode 0xd9 0xf2 - FPTAN: replace ST(0) with its partial tangent and
 *  push the second result (1.0 on success, per the two-output worker). */
FNIEMOP_DEF(iemOp_fptan)
{
    IEMOP_MNEMONIC(fptan_st0, "fptan st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fptan_r80_r80);
}


/** Opcode 0xd9 0xf3 - FPATAN: ST(1) = arctan(ST(1)/ST(0)), then pop. */
FNIEMOP_DEF(iemOp_fpatan)
{
    IEMOP_MNEMONIC(fpatan_st1_st0, "fpatan st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fpatan_r80_by_r80);
}


/** Opcode 0xd9 0xf4 - FXTRACT: split ST(0) into exponent (replacing ST(0))
 *  and significand (pushed). */
FNIEMOP_DEF(iemOp_fxtract)
{
    IEMOP_MNEMONIC(fxtract_st0, "fxtract st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fxtract_r80_r80);
}


/** Opcode 0xd9 0xf5 - FPREM1: IEEE partial remainder of ST(0)/ST(1) into
 *  ST(0); uses the st0_stN helper (defined earlier in this file). */
FNIEMOP_DEF(iemOp_fprem1)
{
    IEMOP_MNEMONIC(fprem1_st0_st1, "fprem1 st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem1_r80_by_r80);
}
7744
7745
/** Opcode 0xd9 0xf6 - FDECSTP.
 * Decrements the FPU stack top pointer (TOP) without changing any data
 * register or tag word contents. */
FNIEMOP_DEF(iemOp_fdecstp)
{
    IEMOP_MNEMONIC(fdecstp, "fdecstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_DEC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0);

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
7768
7769
/** Opcode 0xd9 0xf7 - FINCSTP.
 * Increments the FPU stack top pointer (TOP) without changing any data
 * register or tag word contents. */
FNIEMOP_DEF(iemOp_fincstp)
{
    IEMOP_MNEMONIC(fincstp, "fincstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0);

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
7792
7793
/** Opcode 0xd9 0xf8 - FPREM: partial remainder (truncating) of ST(0)/ST(1)
 *  into ST(0); uses the st0_stN helper (defined earlier in this file). */
FNIEMOP_DEF(iemOp_fprem)
{
    IEMOP_MNEMONIC(fprem_st0_st1, "fprem st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem_r80_by_r80);
}


/** Opcode 0xd9 0xf9 - FYL2XP1: ST(1) = ST(1) * log2(ST(0) + 1), then pop. */
FNIEMOP_DEF(iemOp_fyl2xp1)
{
    IEMOP_MNEMONIC(fyl2xp1_st1_st0, "fyl2xp1 st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2xp1_r80_by_r80);
}


/** Opcode 0xd9 0xfa - FSQRT: replace ST(0) with its square root. */
FNIEMOP_DEF(iemOp_fsqrt)
{
    IEMOP_MNEMONIC(fsqrt_st0, "fsqrt st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsqrt_r80);
}


/** Opcode 0xd9 0xfb - FSINCOS: replace ST(0) with sin(ST(0)) and push
 *  cos(ST(0)) (two-output worker). */
FNIEMOP_DEF(iemOp_fsincos)
{
    IEMOP_MNEMONIC(fsincos_st0, "fsincos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fsincos_r80_r80);
}


/** Opcode 0xd9 0xfc - FRNDINT: round ST(0) to integer per the FCW rounding
 *  control. */
FNIEMOP_DEF(iemOp_frndint)
{
    IEMOP_MNEMONIC(frndint_st0, "frndint st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_frndint_r80);
}


/** Opcode 0xd9 0xfd - FSCALE: scale ST(0) by 2^trunc(ST(1)); uses the
 *  st0_stN helper (defined earlier in this file). */
FNIEMOP_DEF(iemOp_fscale)
{
    IEMOP_MNEMONIC(fscale_st0_st1, "fscale st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fscale_r80_by_r80);
}


/** Opcode 0xd9 0xfe - FSIN: replace ST(0) with its sine. */
FNIEMOP_DEF(iemOp_fsin)
{
    IEMOP_MNEMONIC(fsin_st0, "fsin st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsin_r80);
}


/** Opcode 0xd9 0xff - FCOS: replace ST(0) with its cosine. */
FNIEMOP_DEF(iemOp_fcos)
{
    IEMOP_MNEMONIC(fcos_st0, "fcos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fcos_r80);
}
7856
7857
/** Used by iemOp_EscF1.
 * Dispatch table for 0xd9 instructions with mod=3 and a second opcode byte in
 * the 0xe0..0xff range, indexed by (byte - 0xe0).  Invalid encodings map to
 * iemOp_Invalid. */
IEM_STATIC const PFNIEMOP g_apfnEscF1_E0toFF[32] =
{
    /* 0xe0 */ iemOp_fchs,
    /* 0xe1 */ iemOp_fabs,
    /* 0xe2 */ iemOp_Invalid,
    /* 0xe3 */ iemOp_Invalid,
    /* 0xe4 */ iemOp_ftst,
    /* 0xe5 */ iemOp_fxam,
    /* 0xe6 */ iemOp_Invalid,
    /* 0xe7 */ iemOp_Invalid,
    /* 0xe8 */ iemOp_fld1,
    /* 0xe9 */ iemOp_fldl2t,
    /* 0xea */ iemOp_fldl2e,
    /* 0xeb */ iemOp_fldpi,
    /* 0xec */ iemOp_fldlg2,
    /* 0xed */ iemOp_fldln2,
    /* 0xee */ iemOp_fldz,
    /* 0xef */ iemOp_Invalid,
    /* 0xf0 */ iemOp_f2xm1,
    /* 0xf1 */ iemOp_fyl2x,
    /* 0xf2 */ iemOp_fptan,
    /* 0xf3 */ iemOp_fpatan,
    /* 0xf4 */ iemOp_fxtract,
    /* 0xf5 */ iemOp_fprem1,
    /* 0xf6 */ iemOp_fdecstp,
    /* 0xf7 */ iemOp_fincstp,
    /* 0xf8 */ iemOp_fprem,
    /* 0xf9 */ iemOp_fyl2xp1,
    /* 0xfa */ iemOp_fsqrt,
    /* 0xfb */ iemOp_fsincos,
    /* 0xfc */ iemOp_frndint,
    /* 0xfd */ iemOp_fscale,
    /* 0xfe */ iemOp_fsin,
    /* 0xff */ iemOp_fcos
};
7894
7895
7896/**
7897 * @opcode 0xd9
7898 */
7899FNIEMOP_DEF(iemOp_EscF1)
7900{
7901 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7902 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd9 & 0x7);
7903
7904 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7905 {
7906 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
7907 {
7908 case 0: return FNIEMOP_CALL_1(iemOp_fld_stN, bRm);
7909 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm);
7910 case 2:
7911 if (bRm == 0xd0)
7912 return FNIEMOP_CALL(iemOp_fnop);
7913 return IEMOP_RAISE_INVALID_OPCODE();
7914 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved. Intel behavior seems to be FSTP ST(i) though. */
7915 case 4:
7916 case 5:
7917 case 6:
7918 case 7:
7919 Assert((unsigned)bRm - 0xe0U < RT_ELEMENTS(g_apfnEscF1_E0toFF));
7920 return FNIEMOP_CALL(g_apfnEscF1_E0toFF[bRm - 0xe0]);
7921 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7922 }
7923 }
7924 else
7925 {
7926 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
7927 {
7928 case 0: return FNIEMOP_CALL_1(iemOp_fld_m32r, bRm);
7929 case 1: return IEMOP_RAISE_INVALID_OPCODE();
7930 case 2: return FNIEMOP_CALL_1(iemOp_fst_m32r, bRm);
7931 case 3: return FNIEMOP_CALL_1(iemOp_fstp_m32r, bRm);
7932 case 4: return FNIEMOP_CALL_1(iemOp_fldenv, bRm);
7933 case 5: return FNIEMOP_CALL_1(iemOp_fldcw, bRm);
7934 case 6: return FNIEMOP_CALL_1(iemOp_fnstenv, bRm);
7935 case 7: return FNIEMOP_CALL_1(iemOp_fnstcw, bRm);
7936 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7937 }
7938 }
7939}
7940
7941
/** Opcode 0xda 11/0 - FCMOVB ST(0),ST(i).
 * Copies ST(i) to ST(0) if EFLAGS.CF is set; both registers must be
 * non-empty or a stack underflow is recorded. */
FNIEMOP_DEF_1(iemOp_fcmovb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovb_st0_stN, "fcmovb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7968
7969
/** Opcode 0xda 11/1 - FCMOVE ST(0),ST(i).
 * Copies ST(i) to ST(0) if EFLAGS.ZF is set; both registers must be
 * non-empty or a stack underflow is recorded. */
FNIEMOP_DEF_1(iemOp_fcmove_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmove_st0_stN, "fcmove st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7996
7997
/** Opcode 0xda 11/2 - FCMOVBE ST(0),ST(i).
 * Copies ST(i) to ST(0) if EFLAGS.CF or EFLAGS.ZF is set; both registers must
 * be non-empty or a stack underflow is recorded. */
FNIEMOP_DEF_1(iemOp_fcmovbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovbe_st0_stN, "fcmovbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8024
8025
/** Opcode 0xda 11/3 - FCMOVU ST(0),ST(i).
 * Copies ST(i) to ST(0) if EFLAGS.PF is set (the 'unordered' condition);
 * both registers must be non-empty or a stack underflow is recorded. */
FNIEMOP_DEF_1(iemOp_fcmovu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovu_st0_stN, "fcmovu st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8052
8053
8054/**
8055 * Common worker for FPU instructions working on ST0 and STn, only affecting
8056 * flags, and popping twice when done.
8057 *
8058 * @param pfnAImpl Pointer to the instruction implementation (assembly).
8059 */
8060FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
8061{
8062 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8063
8064 IEM_MC_BEGIN(3, 1);
8065 IEM_MC_LOCAL(uint16_t, u16Fsw);
8066 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
8067 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
8068 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
8069
8070 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8071 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8072
8073 IEM_MC_PREPARE_FPU_USAGE();
8074 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, 1)
8075 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
8076 IEM_MC_UPDATE_FSW_THEN_POP_POP(u16Fsw);
8077 IEM_MC_ELSE()
8078 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP_POP();
8079 IEM_MC_ENDIF();
8080 IEM_MC_ADVANCE_RIP();
8081
8082 IEM_MC_END();
8083 return VINF_SUCCESS;
8084}
8085
8086
/** Opcode 0xda 0xe9 - FUCOMPP: unordered compare ST(0) with ST(1), then pop
 *  the stack twice. */
FNIEMOP_DEF(iemOp_fucompp)
{
    IEMOP_MNEMONIC(fucompp_st0_stN, "fucompp st0,stN");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fucom_r80_by_r80);
}
8093
8094
8095/**
8096 * Common worker for FPU instructions working on ST0 and an m32i, and storing
8097 * the result in ST0.
8098 *
8099 * @param pfnAImpl Pointer to the instruction implementation (assembly).
8100 */
8101FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32i, uint8_t, bRm, PFNIEMAIMPLFPUI32, pfnAImpl)
8102{
8103 IEM_MC_BEGIN(3, 3);
8104 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8105 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
8106 IEM_MC_LOCAL(int32_t, i32Val2);
8107 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
8108 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
8109 IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);
8110
8111 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8112 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8113
8114 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8115 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8116 IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
8117
8118 IEM_MC_PREPARE_FPU_USAGE();
8119 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
8120 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi32Val2);
8121 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
8122 IEM_MC_ELSE()
8123 IEM_MC_FPU_STACK_UNDERFLOW(0);
8124 IEM_MC_ENDIF();
8125 IEM_MC_ADVANCE_RIP();
8126
8127 IEM_MC_END();
8128 return VINF_SUCCESS;
8129}
8130
8131
/** Opcode 0xda !11/0 - FIADD m32int: ST(0) += m32i. */
FNIEMOP_DEF_1(iemOp_fiadd_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m32i, "fiadd m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fiadd_r80_by_i32);
}


/** Opcode 0xda !11/1 - FIMUL m32int: ST(0) *= m32i. */
FNIEMOP_DEF_1(iemOp_fimul_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m32i, "fimul m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fimul_r80_by_i32);
}
8146
8147
/** Opcode 0xda !11/2 - FICOM m32int.
 * Compares ST(0) with a 32-bit integer from memory, updating only the FSW
 * condition bits (no data register is modified, no pop). */
FNIEMOP_DEF_1(iemOp_ficom_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m32i, "ficom st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8180
8181
/** Opcode 0xda !11/3 - FICOMP m32int.
 * Same as FICOM m32int (shares iemAImpl_ficom_r80_by_i32), but pops the
 * register stack afterwards. */
FNIEMOP_DEF_1(iemOp_ficomp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m32i, "ficomp st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8214
8215
/** Opcode 0xda !11/4 - FISUB m32int: ST(0) -= m32i. */
FNIEMOP_DEF_1(iemOp_fisub_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m32i, "fisub m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisub_r80_by_i32);
}


/** Opcode 0xda !11/5 - FISUBR m32int: ST(0) = m32i - ST(0). */
FNIEMOP_DEF_1(iemOp_fisubr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m32i, "fisubr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisubr_r80_by_i32);
}


/** Opcode 0xda !11/6 - FIDIV m32int: ST(0) /= m32i. */
FNIEMOP_DEF_1(iemOp_fidiv_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m32i, "fidiv m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidiv_r80_by_i32);
}


/** Opcode 0xda !11/7 - FIDIVR m32int: ST(0) = m32i / ST(0). */
FNIEMOP_DEF_1(iemOp_fidivr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m32i, "fidivr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidivr_r80_by_i32);
}
8246
8247
8248/**
8249 * @opcode 0xda
8250 */
8251FNIEMOP_DEF(iemOp_EscF2)
8252{
8253 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8254 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xda & 0x7);
8255 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8256 {
8257 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
8258 {
8259 case 0: return FNIEMOP_CALL_1(iemOp_fcmovb_stN, bRm);
8260 case 1: return FNIEMOP_CALL_1(iemOp_fcmove_stN, bRm);
8261 case 2: return FNIEMOP_CALL_1(iemOp_fcmovbe_stN, bRm);
8262 case 3: return FNIEMOP_CALL_1(iemOp_fcmovu_stN, bRm);
8263 case 4: return IEMOP_RAISE_INVALID_OPCODE();
8264 case 5:
8265 if (bRm == 0xe9)
8266 return FNIEMOP_CALL(iemOp_fucompp);
8267 return IEMOP_RAISE_INVALID_OPCODE();
8268 case 6: return IEMOP_RAISE_INVALID_OPCODE();
8269 case 7: return IEMOP_RAISE_INVALID_OPCODE();
8270 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8271 }
8272 }
8273 else
8274 {
8275 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
8276 {
8277 case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m32i, bRm);
8278 case 1: return FNIEMOP_CALL_1(iemOp_fimul_m32i, bRm);
8279 case 2: return FNIEMOP_CALL_1(iemOp_ficom_m32i, bRm);
8280 case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m32i, bRm);
8281 case 4: return FNIEMOP_CALL_1(iemOp_fisub_m32i, bRm);
8282 case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m32i, bRm);
8283 case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m32i, bRm);
8284 case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m32i, bRm);
8285 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8286 }
8287 }
8288}
8289
8290
/** Opcode 0xdb !11/0 - FILD m32int.
 * Converts a 32-bit integer from memory to 80-bit real and pushes it onto the
 * register stack; a non-empty ST(7) yields a push overflow. */
FNIEMOP_DEF_1(iemOp_fild_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m32i, "fild m32i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int32_t, i32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val, i32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i32_to_r80, pFpuRes, pi32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8322
8323
/** Opcode 0xdb !11/1.
 * FISTTP m32i - stores ST(0) to memory as a 32-bit signed integer using
 * truncation (chop) rounding, then pops the FPU stack. */
FNIEMOP_DEF_1(iemOp_fisttp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m32i, "fisttp m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,                 GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,                u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,      pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int32_t *,                 pi32Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,              pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: with IM masked, store the integer indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8358
8359
/** Opcode 0xdb !11/2.
 * FIST m32i - stores ST(0) to memory as a 32-bit signed integer using the
 * current FCW rounding mode; does not pop. */
FNIEMOP_DEF_1(iemOp_fist_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m32i, "fist m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,                 GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,                u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,      pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int32_t *,                 pi32Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,              pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: with IM masked, store the integer indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8394
8395
/** Opcode 0xdb !11/3.
 * FISTP m32i - stores ST(0) to memory as a 32-bit signed integer using the
 * current FCW rounding mode, then pops the FPU stack. */
FNIEMOP_DEF_1(iemOp_fistp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m32i, "fistp m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,                 GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,                u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,      pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int32_t *,                 pi32Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,              pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: with IM masked, store the integer indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8430
8431
/** Opcode 0xdb !11/5.
 * FLD m80r - pushes an 80-bit real memory operand onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fld_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m80r, "fld m80r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(RTFLOAT80U,            r80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT80U,  pr80Val,    r80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R80(r80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* ST(7) is the register that becomes the new top after the push. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r80, pFpuRes, pr80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8463
8464
/** Opcode 0xdb !11/7.
 * FSTP m80r - stores ST(0) to memory as an 80-bit real, then pops. */
FNIEMOP_DEF_1(iemOp_fstp_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m80r, "fstp m80r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,                 GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,                u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,      pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT80U,               pr80Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,              pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr80Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r80, pu16Fsw, pr80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr80Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: with IM masked, store the real indefinite (-QNaN). */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(pr80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr80Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8499
8500
/** Opcode 0xdb 11/0.
 * FCMOVNB st0,stN - copies ST(n) to ST(0) if CF is clear (not below). */
FNIEMOP_DEF_1(iemOp_fcmovnb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnb_st0_stN, "fcmovnb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(0) and ST(n) must be valid, else stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8527
8528
/** Opcode 0xdb 11/1.
 * FCMOVNE st0,stN - copies ST(n) to ST(0) if ZF is clear (not equal). */
FNIEMOP_DEF_1(iemOp_fcmovne_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovne_st0_stN, "fcmovne st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(0) and ST(n) must be valid, else stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8555
8556
/** Opcode 0xdb 11/2.
 * FCMOVNBE st0,stN - copies ST(n) to ST(0) if both CF and ZF are clear
 * (not below or equal). */
FNIEMOP_DEF_1(iemOp_fcmovnbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnbe_st0_stN, "fcmovnbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(0) and ST(n) must be valid, else stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8583
8584
8585/** Opcode 0xdb 11/3. */
8586FNIEMOP_DEF_1(iemOp_fcmovnnu_stN, uint8_t, bRm)
8587{
8588 IEMOP_MNEMONIC(fcmovnnu_st0_stN, "fcmovnnu st0,stN");
8589 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8590
8591 IEM_MC_BEGIN(0, 1);
8592 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
8593
8594 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8595 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8596
8597 IEM_MC_PREPARE_FPU_USAGE();
8598 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
8599 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF)
8600 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
8601 IEM_MC_ENDIF();
8602 IEM_MC_UPDATE_FPU_OPCODE_IP();
8603 IEM_MC_ELSE()
8604 IEM_MC_FPU_STACK_UNDERFLOW(0);
8605 IEM_MC_ENDIF();
8606 IEM_MC_ADVANCE_RIP();
8607
8608 IEM_MC_END();
8609 return VINF_SUCCESS;
8610}
8611
8612
/** Opcode 0xdb 0xe0.
 * FNENI - 8087 'enable interrupts'; a no-op (ignored) on later FPUs, so
 * this only performs the device-not-available check and advances RIP. */
FNIEMOP_DEF(iemOp_fneni)
{
    IEMOP_MNEMONIC(fneni, "fneni (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8624
8625
/** Opcode 0xdb 0xe1.
 * FNDISI - 8087 'disable interrupts'; a no-op (ignored) on later FPUs. */
FNIEMOP_DEF(iemOp_fndisi)
{
    IEMOP_MNEMONIC(fndisi, "fndisi (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8637
8638
/** Opcode 0xdb 0xe2.
 * FNCLEX - clears the FPU exception flags in FSW without checking for
 * pending exceptions first (the 'no-wait' form). */
FNIEMOP_DEF(iemOp_fnclex)
{
    IEMOP_MNEMONIC(fnclex, "fnclex");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_CLEAR_FSW_EX();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8653
8654
/** Opcode 0xdb 0xe3.
 * FNINIT - re-initializes the FPU; deferred to a C implementation with
 * fCheckXcpts=false (no-wait form: no pending-exception check). */
FNIEMOP_DEF(iemOp_fninit)
{
    IEMOP_MNEMONIC(fninit, "fninit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_finit, false /*fCheckXcpts*/);
}
8662
8663
/** Opcode 0xdb 0xe4.
 * FNSETPM - 80287 'set protected mode'; ignored (no-op) on later FPUs. */
FNIEMOP_DEF(iemOp_fnsetpm)
{
    IEMOP_MNEMONIC(fnsetpm, "fnsetpm (80287/ign)");   /* set protected mode on fpu. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8675
8676
/** Opcode 0xdb 0xe5.
 * FRSTPM - 80287XL 'reset protected mode'; raises \#UD here since newer
 * CPUs treat the encoding as invalid (the ignore-path is compiled out). */
FNIEMOP_DEF(iemOp_frstpm)
{
    IEMOP_MNEMONIC(frstpm, "frstpm (80287XL/ign)"); /* reset pm, back to real mode. */
#if 0 /* #UDs on newer CPUs */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
#else
    return IEMOP_RAISE_INVALID_OPCODE();
#endif
}
8692
8693
/** Opcode 0xdb 11/5.
 * FUCOMI st0,stN - unordered compare setting EFLAGS; no pop (fPop=false). */
FNIEMOP_DEF_1(iemOp_fucomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomi_st0_stN, "fucomi st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fucomi_r80_by_r80, false /*fPop*/);
}
8700
8701
/** Opcode 0xdb 11/6.
 * FCOMI st0,stN - ordered compare setting EFLAGS; no pop (fPop=false). */
FNIEMOP_DEF_1(iemOp_fcomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomi_st0_stN, "fcomi st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, false /*fPop*/);
}
8708
8709
8710/**
8711 * @opcode 0xdb
8712 */
8713FNIEMOP_DEF(iemOp_EscF3)
8714{
8715 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8716 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdb & 0x7);
8717 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8718 {
8719 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
8720 {
8721 case 0: return FNIEMOP_CALL_1(iemOp_fcmovnb_stN, bRm);
8722 case 1: return FNIEMOP_CALL_1(iemOp_fcmovne_stN, bRm);
8723 case 2: return FNIEMOP_CALL_1(iemOp_fcmovnbe_stN, bRm);
8724 case 3: return FNIEMOP_CALL_1(iemOp_fcmovnnu_stN, bRm);
8725 case 4:
8726 switch (bRm)
8727 {
8728 case 0xe0: return FNIEMOP_CALL(iemOp_fneni);
8729 case 0xe1: return FNIEMOP_CALL(iemOp_fndisi);
8730 case 0xe2: return FNIEMOP_CALL(iemOp_fnclex);
8731 case 0xe3: return FNIEMOP_CALL(iemOp_fninit);
8732 case 0xe4: return FNIEMOP_CALL(iemOp_fnsetpm);
8733 case 0xe5: return FNIEMOP_CALL(iemOp_frstpm);
8734 case 0xe6: return IEMOP_RAISE_INVALID_OPCODE();
8735 case 0xe7: return IEMOP_RAISE_INVALID_OPCODE();
8736 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8737 }
8738 break;
8739 case 5: return FNIEMOP_CALL_1(iemOp_fucomi_stN, bRm);
8740 case 6: return FNIEMOP_CALL_1(iemOp_fcomi_stN, bRm);
8741 case 7: return IEMOP_RAISE_INVALID_OPCODE();
8742 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8743 }
8744 }
8745 else
8746 {
8747 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
8748 {
8749 case 0: return FNIEMOP_CALL_1(iemOp_fild_m32i, bRm);
8750 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m32i,bRm);
8751 case 2: return FNIEMOP_CALL_1(iemOp_fist_m32i, bRm);
8752 case 3: return FNIEMOP_CALL_1(iemOp_fistp_m32i, bRm);
8753 case 4: return IEMOP_RAISE_INVALID_OPCODE();
8754 case 5: return FNIEMOP_CALL_1(iemOp_fld_m80r, bRm);
8755 case 6: return IEMOP_RAISE_INVALID_OPCODE();
8756 case 7: return FNIEMOP_CALL_1(iemOp_fstp_m80r, bRm);
8757 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8758 }
8759 }
8760}
8761
8762
8763/**
8764 * Common worker for FPU instructions working on STn and ST0, and storing the
8765 * result in STn unless IE, DE or ZE was raised.
8766 *
8767 * @param pfnAImpl Pointer to the instruction implementation (assembly).
8768 */
8769FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
8770{
8771 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8772
8773 IEM_MC_BEGIN(3, 1);
8774 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
8775 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
8776 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
8777 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
8778
8779 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8780 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8781
8782 IEM_MC_PREPARE_FPU_USAGE();
8783 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
8784 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
8785 IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
8786 IEM_MC_ELSE()
8787 IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
8788 IEM_MC_ENDIF();
8789 IEM_MC_ADVANCE_RIP();
8790
8791 IEM_MC_END();
8792 return VINF_SUCCESS;
8793}
8794
8795
/** Opcode 0xdc 11/0.
 * FADD stN,st0 - stN += st0, result stored in stN. */
FNIEMOP_DEF_1(iemOp_fadd_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_stN_st0, "fadd stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fadd_r80_by_r80);
}
8802
8803
/** Opcode 0xdc 11/1.
 * FMUL stN,st0 - stN *= st0, result stored in stN. */
FNIEMOP_DEF_1(iemOp_fmul_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_stN_st0, "fmul stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fmul_r80_by_r80);
}
8810
8811
/** Opcode 0xdc 11/4.
 * FSUBR stN,st0 - reversed subtract, result stored in stN. */
FNIEMOP_DEF_1(iemOp_fsubr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_stN_st0, "fsubr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsubr_r80_by_r80);
}
8818
8819
/** Opcode 0xdc 11/5.
 * FSUB stN,st0 - subtract, result stored in stN. */
FNIEMOP_DEF_1(iemOp_fsub_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_stN_st0, "fsub stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsub_r80_by_r80);
}
8826
8827
/** Opcode 0xdc 11/6.
 * FDIVR stN,st0 - reversed divide, result stored in stN. */
FNIEMOP_DEF_1(iemOp_fdivr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_stN_st0, "fdivr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdivr_r80_by_r80);
}
8834
8835
/** Opcode 0xdc 11/7.
 * FDIV stN,st0 - divide, result stored in stN. */
FNIEMOP_DEF_1(iemOp_fdiv_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_stN_st0, "fdiv stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdiv_r80_by_r80);
}
8842
8843
8844/**
8845 * Common worker for FPU instructions working on ST0 and a 64-bit floating point
8846 * memory operand, and storing the result in ST0.
8847 *
8848 * @param pfnAImpl Pointer to the instruction implementation (assembly).
8849 */
8850FNIEMOP_DEF_2(iemOpHlpFpu_ST0_m64r, uint8_t, bRm, PFNIEMAIMPLFPUR64, pfnImpl)
8851{
8852 IEM_MC_BEGIN(3, 3);
8853 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8854 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
8855 IEM_MC_LOCAL(RTFLOAT64U, r64Factor2);
8856 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
8857 IEM_MC_ARG(PCRTFLOAT80U, pr80Factor1, 1);
8858 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Factor2, r64Factor2, 2);
8859
8860 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8861 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8862 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8863 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8864
8865 IEM_MC_FETCH_MEM_R64(r64Factor2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
8866 IEM_MC_PREPARE_FPU_USAGE();
8867 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Factor1, 0)
8868 IEM_MC_CALL_FPU_AIMPL_3(pfnImpl, pFpuRes, pr80Factor1, pr64Factor2);
8869 IEM_MC_STORE_FPU_RESULT_MEM_OP(FpuRes, 0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
8870 IEM_MC_ELSE()
8871 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
8872 IEM_MC_ENDIF();
8873 IEM_MC_ADVANCE_RIP();
8874
8875 IEM_MC_END();
8876 return VINF_SUCCESS;
8877}
8878
8879
/** Opcode 0xdc !11/0.
 * FADD m64r - st0 += m64r. */
FNIEMOP_DEF_1(iemOp_fadd_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_m64r, "fadd m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fadd_r80_by_r64);
}
8886
8887
/** Opcode 0xdc !11/1.
 * FMUL m64r - st0 *= m64r. */
FNIEMOP_DEF_1(iemOp_fmul_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_m64r, "fmul m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fmul_r80_by_r64);
}
8894
8895
/** Opcode 0xdc !11/2.
 * FCOM st0,m64r - compares ST(0) with a 64-bit real memory operand, setting
 * C0/C2/C3 in FSW; does not pop. */
FNIEMOP_DEF_1(iemOp_fcom_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m64r, "fcom st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U,            r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U,  pr64Val2,   r64Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8928
8929
/** Opcode 0xdc !11/3.
 * FCOMP st0,m64r - like FCOM m64r but pops ST(0) afterwards. */
FNIEMOP_DEF_1(iemOp_fcomp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m64r, "fcomp st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U,            r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U,  pr64Val2,   r64Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8962
8963
/** Opcode 0xdc !11/4.
 * FSUB m64r - st0 -= m64r. */
FNIEMOP_DEF_1(iemOp_fsub_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_m64r, "fsub m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsub_r80_by_r64);
}
8970
8971
/** Opcode 0xdc !11/5.
 * FSUBR m64r - st0 = m64r - st0. */
FNIEMOP_DEF_1(iemOp_fsubr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_m64r, "fsubr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsubr_r80_by_r64);
}
8978
8979
/** Opcode 0xdc !11/6.
 * FDIV m64r - st0 /= m64r. */
FNIEMOP_DEF_1(iemOp_fdiv_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_m64r, "fdiv m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdiv_r80_by_r64);
}
8986
8987
/** Opcode 0xdc !11/7.
 * FDIVR m64r - st0 = m64r / st0. */
FNIEMOP_DEF_1(iemOp_fdivr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_m64r, "fdivr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdivr_r80_by_r64);
}
8994
8995
8996/**
8997 * @opcode 0xdc
8998 */
8999FNIEMOP_DEF(iemOp_EscF4)
9000{
9001 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9002 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdc & 0x7);
9003 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9004 {
9005 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
9006 {
9007 case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN_st0, bRm);
9008 case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN_st0, bRm);
9009 case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm); /* Marked reserved, intel behavior is that of FCOM ST(i). */
9010 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm); /* Marked reserved, intel behavior is that of FCOMP ST(i). */
9011 case 4: return FNIEMOP_CALL_1(iemOp_fsubr_stN_st0, bRm);
9012 case 5: return FNIEMOP_CALL_1(iemOp_fsub_stN_st0, bRm);
9013 case 6: return FNIEMOP_CALL_1(iemOp_fdivr_stN_st0, bRm);
9014 case 7: return FNIEMOP_CALL_1(iemOp_fdiv_stN_st0, bRm);
9015 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9016 }
9017 }
9018 else
9019 {
9020 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
9021 {
9022 case 0: return FNIEMOP_CALL_1(iemOp_fadd_m64r, bRm);
9023 case 1: return FNIEMOP_CALL_1(iemOp_fmul_m64r, bRm);
9024 case 2: return FNIEMOP_CALL_1(iemOp_fcom_m64r, bRm);
9025 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m64r, bRm);
9026 case 4: return FNIEMOP_CALL_1(iemOp_fsub_m64r, bRm);
9027 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m64r, bRm);
9028 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m64r, bRm);
9029 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m64r, bRm);
9030 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9031 }
9032 }
9033}
9034
9035
/** Opcode 0xdd !11/0.
 * FLD m64r - converts a 64-bit real memory operand to 80-bit real and
 * pushes it onto the FPU stack.
 * @sa iemOp_fld_m32r */
FNIEMOP_DEF_1(iemOp_fld_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m64r, "fld m64r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U,            r64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U,  pr64Val,    r64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    /* ST(7) is the register that becomes the new top after the push. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r64_to_r80, pFpuRes, pr64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9067
9068
/** Opcode 0xdd !11/1 (reg field is 1, not 0 - FISTTP per Intel SDM).
 * FISTTP m64i - stores ST(0) to memory as a 64-bit signed integer using
 * truncation (chop) rounding, then pops the FPU stack. */
FNIEMOP_DEF_1(iemOp_fisttp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m64i, "fisttp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,                 GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,                u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,      pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int64_t *,                 pi64Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,              pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: with IM masked, store the integer indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9103
9104
/** Opcode 0xdd !11/2 (reg field is 2, not 0 - FST m64r per Intel SDM).
 * FST m64r - stores ST(0) to memory as a 64-bit real; does not pop. */
FNIEMOP_DEF_1(iemOp_fst_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m64r, "fst m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,                 GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,                u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,      pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U,               pr64Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,              pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: with IM masked, store the real indefinite (-QNaN). */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9139
9140
9141
9142
9143/** Opcode 0xdd !11/0. */
9144FNIEMOP_DEF_1(iemOp_fstp_m64r, uint8_t, bRm)
9145{
9146 IEMOP_MNEMONIC(fstp_m64r, "fstp m64r");
9147 IEM_MC_BEGIN(3, 2);
9148 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9149 IEM_MC_LOCAL(uint16_t, u16Fsw);
9150 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
9151 IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
9152 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
9153
9154 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9155 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9156 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9157 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9158
9159 IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
9160 IEM_MC_PREPARE_FPU_USAGE();
9161 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
9162 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
9163 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
9164 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9165 IEM_MC_ELSE()
9166 IEM_MC_IF_FCW_IM()
9167 IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
9168 IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
9169 IEM_MC_ENDIF();
9170 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9171 IEM_MC_ENDIF();
9172 IEM_MC_ADVANCE_RIP();
9173
9174 IEM_MC_END();
9175 return VINF_SUCCESS;
9176}
9177
9178
/** Opcode 0xdd !11/4 (reg field is 4, not 0 - FRSTOR per Intel SDM).
 * FRSTOR m94/108byte - restores the full FPU state from memory; deferred
 * to a C implementation. */
FNIEMOP_DEF_1(iemOp_frstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC(frstor, "frstor m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE,           enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize,  0);
    IEM_MC_ARG(uint8_t,                 iEffSeg,                                        1);
    IEM_MC_ARG(RTGCPTR,                 GCPtrEffSrc,                                    2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_frstor, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
9196
9197
/** Opcode 0xdd !11/6 (reg field is 6, not 0 - FNSAVE per Intel SDM).
 * FNSAVE m94/108byte - saves the full FPU state to memory and
 * re-initializes the FPU; deferred to a C implementation. */
FNIEMOP_DEF_1(iemOp_fnsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnsave, "fnsave m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE,           enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize,  0);
    IEM_MC_ARG(uint8_t,                 iEffSeg,                                        1);
    IEM_MC_ARG(RTGCPTR,                 GCPtrEffDst,                                    2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fnsave, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;

}
9216
/** Opcode 0xdd !11/7 (reg field is 7, not 0 - FNSTSW m2byte per Intel SDM).
 * FNSTSW m16 - stores the FPU status word to a 16-bit memory operand. */
FNIEMOP_DEF_1(iemOp_fnstsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstsw_m16, "fnstsw m16");

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
    IEM_MC_ADVANCE_RIP();

/** @todo Debug / drop a hint to the verifier that things may differ
 * from REM. Seen 0x4020 (iem) vs 0x4000 (rem) at 0008:801c6b88 booting
 * NT4SP1. (X86_FSW_PE) */
    IEM_MC_END();
    return VINF_SUCCESS;
}
9241
9242
/** Opcode 0xdd 11/0.
 * FFREE stN - marks the given register as empty in the FPU tag word
 * without changing the stack top. */
FNIEMOP_DEF_1(iemOp_ffree_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffree_stN, "ffree stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C1, C2 and C3 are documented as undefined, we leave the
             unmodified. */

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9264
9265
9266/** Opcode 0xdd 11/2 (reg==2 per iemOp_EscF5 dispatch). FST ST(i): copy ST(0) to ST(i), no pop. */
9267FNIEMOP_DEF_1(iemOp_fst_stN, uint8_t, bRm)
9268{
9269 IEMOP_MNEMONIC(fst_st0_stN, "fst st0,stN");
9270 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9271
9272 IEM_MC_BEGIN(0, 2);
9273 IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
9274 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
9275 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9276 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9277
9278 IEM_MC_PREPARE_FPU_USAGE();
 /* Empty ST(0) is a stack underflow targeting ST(i); otherwise forward the value. */
9279 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
9280 IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
9281 IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
9282 IEM_MC_ELSE()
9283 IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
9284 IEM_MC_ENDIF();
9285
9286 IEM_MC_ADVANCE_RIP();
9287 IEM_MC_END();
9288 return VINF_SUCCESS;
9289}
9290
9291
9292/** Opcode 0xdd 11/4 (reg==4 per iemOp_EscF5 dispatch). FUCOM ST(0),ST(i): unordered compare, no pop. */
9293FNIEMOP_DEF_1(iemOp_fucom_stN_st0, uint8_t, bRm)
9294{
9295 IEMOP_MNEMONIC(fucom_st0_stN, "fucom st0,stN");
9296 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fucom_r80_by_r80);
9297}
9298
9299
9300/** Opcode 0xdd 11/5 (reg==5 per iemOp_EscF5 dispatch). FUCOMP ST(0),ST(i): unordered compare, then pop. */
9301FNIEMOP_DEF_1(iemOp_fucomp_stN, uint8_t, bRm)
9302{
9303 IEMOP_MNEMONIC(fucomp_st0_stN, "fucomp st0,stN");
9304 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fucom_r80_by_r80);
9305}
9306
9307
9308/**
9309 * @opcode 0xdd
 *
 * x87 escape byte 0xdd decoder.  Register forms (mod==3) dispatch to
 * FFREE/FST/FSTP/FUCOM(P); memory forms to the m64r load/store group plus
 * FRSTOR, FNSAVE and FNSTSW m16.
9310 */
9311FNIEMOP_DEF(iemOp_EscF5)
9312{
9313 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
 /* Remember the FPU opcode (FOP) word; used by IEM_MC_UPDATE_FPU_OPCODE_IP. */
9314 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdd & 0x7);
9315 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9316 {
9317 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
9318 {
9319 case 0: return FNIEMOP_CALL_1(iemOp_ffree_stN, bRm);
9320 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, intel behavior is that of XCHG ST(i). */
9321 case 2: return FNIEMOP_CALL_1(iemOp_fst_stN, bRm);
9322 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm);
9323 case 4: return FNIEMOP_CALL_1(iemOp_fucom_stN_st0,bRm);
9324 case 5: return FNIEMOP_CALL_1(iemOp_fucomp_stN, bRm);
9325 case 6: return IEMOP_RAISE_INVALID_OPCODE();
9326 case 7: return IEMOP_RAISE_INVALID_OPCODE();
9327 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9328 }
9329 }
9330 else
9331 {
9332 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
9333 {
9334 case 0: return FNIEMOP_CALL_1(iemOp_fld_m64r, bRm);
9335 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m64i, bRm);
9336 case 2: return FNIEMOP_CALL_1(iemOp_fst_m64r, bRm);
9337 case 3: return FNIEMOP_CALL_1(iemOp_fstp_m64r, bRm);
9338 case 4: return FNIEMOP_CALL_1(iemOp_frstor, bRm);
9339 case 5: return IEMOP_RAISE_INVALID_OPCODE();
9340 case 6: return FNIEMOP_CALL_1(iemOp_fnsave, bRm);
9341 case 7: return FNIEMOP_CALL_1(iemOp_fnstsw, bRm);
9342 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9343 }
9344 }
9345}
9346
9347
9348/** Opcode 0xde 11/0. FADDP ST(i),ST(0): ST(i) += ST(0), then pop the stack. */
9349FNIEMOP_DEF_1(iemOp_faddp_stN_st0, uint8_t, bRm)
9350{
9351 IEMOP_MNEMONIC(faddp_stN_st0, "faddp stN,st0");
9352 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fadd_r80_by_r80);
9353}
9354
9355
9356/** Opcode 0xde 11/1 (reg==1 per iemOp_EscF6 dispatch). FMULP ST(i),ST(0): multiply, then pop. */
9357FNIEMOP_DEF_1(iemOp_fmulp_stN_st0, uint8_t, bRm)
9358{
9359 IEMOP_MNEMONIC(fmulp_stN_st0, "fmulp stN,st0");
9360 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fmul_r80_by_r80);
9361}
9362
9363
9364/** Opcode 0xde 0xd9. FCOMPP: compare ST(0) with ST(1) and pop both. */
9365FNIEMOP_DEF(iemOp_fcompp)
9366{
9367 IEMOP_MNEMONIC(fcompp_st0_stN, "fcompp st0,stN");
9368 return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fcom_r80_by_r80);
9369}
9370
9371
9372/** Opcode 0xde 11/4. FSUBRP ST(i),ST(0): reversed subtract into ST(i), then pop. */
9373FNIEMOP_DEF_1(iemOp_fsubrp_stN_st0, uint8_t, bRm)
9374{
9375 IEMOP_MNEMONIC(fsubrp_stN_st0, "fsubrp stN,st0");
9376 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsubr_r80_by_r80);
9377}
9378
9379
9380/** Opcode 0xde 11/5. FSUBP ST(i),ST(0): subtract into ST(i), then pop. */
9381FNIEMOP_DEF_1(iemOp_fsubp_stN_st0, uint8_t, bRm)
9382{
9383 IEMOP_MNEMONIC(fsubp_stN_st0, "fsubp stN,st0");
9384 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsub_r80_by_r80);
9385}
9386
9387
9388/** Opcode 0xde 11/6. FDIVRP ST(i),ST(0): reversed divide into ST(i), then pop. */
9389FNIEMOP_DEF_1(iemOp_fdivrp_stN_st0, uint8_t, bRm)
9390{
9391 IEMOP_MNEMONIC(fdivrp_stN_st0, "fdivrp stN,st0");
9392 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdivr_r80_by_r80);
9393}
9394
9395
9396/** Opcode 0xde 11/7. FDIVP ST(i),ST(0): divide into ST(i), then pop. */
9397FNIEMOP_DEF_1(iemOp_fdivp_stN_st0, uint8_t, bRm)
9398{
9399 IEMOP_MNEMONIC(fdivp_stN_st0, "fdivp stN,st0");
9400 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdiv_r80_by_r80);
9401}
9402
9403
9404/**
9405 * Common worker for FPU instructions working on ST0 and an m16i, and storing
9406 * the result in ST0.
9407 *
 * @param bRm The ModR/M byte (a memory form; used for EA calculation).
9408 * @param pfnAImpl Pointer to the instruction implementation (assembly).
9409 */
9410FNIEMOP_DEF_2(iemOpHlpFpu_st0_m16i, uint8_t, bRm, PFNIEMAIMPLFPUI16, pfnAImpl)
9411{
9412 IEM_MC_BEGIN(3, 3);
9413 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
9414 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
9415 IEM_MC_LOCAL(int16_t, i16Val2);
9416 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
9417 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
9418 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);
9419
9420 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9421 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9422
9423 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9424 IEM_MC_MAYBE_RAISE_FPU_XCPT();
 /* The memory operand is fetched before touching the FPU state so a #PF
    leaves the FPU untouched. */
9425 IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9426
9427 IEM_MC_PREPARE_FPU_USAGE();
9428 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
9429 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi16Val2);
9430 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
9431 IEM_MC_ELSE()
9432 IEM_MC_FPU_STACK_UNDERFLOW(0);
9433 IEM_MC_ENDIF();
9434 IEM_MC_ADVANCE_RIP();
9435
9436 IEM_MC_END();
9437 return VINF_SUCCESS;
9438}
9439
9440
9441/** Opcode 0xde !11/0. FIADD m16i: ST(0) += (int16 from memory). */
9442FNIEMOP_DEF_1(iemOp_fiadd_m16i, uint8_t, bRm)
9443{
9444 IEMOP_MNEMONIC(fiadd_m16i, "fiadd m16i");
9445 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fiadd_r80_by_i16);
9446}
9447
9448
9449/** Opcode 0xde !11/1. FIMUL m16i: ST(0) *= (int16 from memory). */
9450FNIEMOP_DEF_1(iemOp_fimul_m16i, uint8_t, bRm)
9451{
9452 IEMOP_MNEMONIC(fimul_m16i, "fimul m16i");
9453 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fimul_r80_by_i16);
9454}
9455
9456
9457/** Opcode 0xde !11/2. FICOM m16i: compare ST(0) with an int16 from memory (FSW only, no pop). */
9458FNIEMOP_DEF_1(iemOp_ficom_m16i, uint8_t, bRm)
9459{
9460 IEMOP_MNEMONIC(ficom_st0_m16i, "ficom st0,m16i");
9461
9462 IEM_MC_BEGIN(3, 3);
9463 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
9464 IEM_MC_LOCAL(uint16_t, u16Fsw);
9465 IEM_MC_LOCAL(int16_t, i16Val2);
9466 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
9467 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
9468 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);
9469
9470 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9471 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9472
9473 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9474 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9475 IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9476
9477 IEM_MC_PREPARE_FPU_USAGE();
 /* Only the FSW (condition codes) is updated - there is no result register. */
9478 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
9479 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
9480 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9481 IEM_MC_ELSE()
9482 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9483 IEM_MC_ENDIF();
9484 IEM_MC_ADVANCE_RIP();
9485
9486 IEM_MC_END();
9487 return VINF_SUCCESS;
9488}
9489
9490
9491/** Opcode 0xde !11/3. FICOMP m16i: like FICOM m16i but pops ST(0) afterwards. */
9492FNIEMOP_DEF_1(iemOp_ficomp_m16i, uint8_t, bRm)
9493{
9494 IEMOP_MNEMONIC(ficomp_st0_m16i, "ficomp st0,m16i");
9495
9496 IEM_MC_BEGIN(3, 3);
9497 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
9498 IEM_MC_LOCAL(uint16_t, u16Fsw);
9499 IEM_MC_LOCAL(int16_t, i16Val2);
9500 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
9501 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
9502 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);
9503
9504 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9505 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9506
9507 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9508 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9509 IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9510
9511 IEM_MC_PREPARE_FPU_USAGE();
 /* Same as ficom, but the _THEN_POP variants pop on both success and underflow. */
9512 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
9513 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
9514 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9515 IEM_MC_ELSE()
9516 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9517 IEM_MC_ENDIF();
9518 IEM_MC_ADVANCE_RIP();
9519
9520 IEM_MC_END();
9521 return VINF_SUCCESS;
9522}
9523
9524
9525/** Opcode 0xde !11/4. FISUB m16i: ST(0) -= (int16 from memory). */
9526FNIEMOP_DEF_1(iemOp_fisub_m16i, uint8_t, bRm)
9527{
9528 IEMOP_MNEMONIC(fisub_m16i, "fisub m16i");
9529 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisub_r80_by_i16);
9530}
9531
9532
9533/** Opcode 0xde !11/5. FISUBR m16i: ST(0) = (int16 from memory) - ST(0). */
9534FNIEMOP_DEF_1(iemOp_fisubr_m16i, uint8_t, bRm)
9535{
9536 IEMOP_MNEMONIC(fisubr_m16i, "fisubr m16i");
9537 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisubr_r80_by_i16);
9538}
9539
9540
9541/** Opcode 0xde !11/6. FIDIV m16i: ST(0) /= (int16 from memory). */
9542FNIEMOP_DEF_1(iemOp_fidiv_m16i, uint8_t, bRm)
9543{
9544 IEMOP_MNEMONIC(fidiv_m16i, "fidiv m16i");
9545 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidiv_r80_by_i16);
9546}
9547
9548
9549/** Opcode 0xde !11/7. FIDIVR m16i: ST(0) = (int16 from memory) / ST(0). */
9550FNIEMOP_DEF_1(iemOp_fidivr_m16i, uint8_t, bRm)
9551{
9552 IEMOP_MNEMONIC(fidivr_m16i, "fidivr m16i");
9553 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidivr_r80_by_i16);
9554}
9555
9556
9557/**
9558 * @opcode 0xde
 *
 * x87 escape byte 0xde decoder.  Register forms (mod==3) are the popping
 * two-operand arithmetic group (FADDP..FDIVP, plus the lone FCOMPP at 0xd9);
 * memory forms are the m16i integer arithmetic group.
9559 */
9560FNIEMOP_DEF(iemOp_EscF6)
9561{
9562 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
 /* Remember the FPU opcode (FOP) word; used by IEM_MC_UPDATE_FPU_OPCODE_IP. */
9563 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xde & 0x7);
9564 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9565 {
9566 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
9567 {
9568 case 0: return FNIEMOP_CALL_1(iemOp_faddp_stN_st0, bRm);
9569 case 1: return FNIEMOP_CALL_1(iemOp_fmulp_stN_st0, bRm);
9570 case 2: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
 /* Only 0xde 0xd9 (FCOMPP) is defined in the reg==3 row. */
9571 case 3: if (bRm == 0xd9)
9572 return FNIEMOP_CALL(iemOp_fcompp);
9573 return IEMOP_RAISE_INVALID_OPCODE();
9574 case 4: return FNIEMOP_CALL_1(iemOp_fsubrp_stN_st0, bRm);
9575 case 5: return FNIEMOP_CALL_1(iemOp_fsubp_stN_st0, bRm);
9576 case 6: return FNIEMOP_CALL_1(iemOp_fdivrp_stN_st0, bRm);
9577 case 7: return FNIEMOP_CALL_1(iemOp_fdivp_stN_st0, bRm);
9578 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9579 }
9580 }
9581 else
9582 {
9583 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
9584 {
9585 case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m16i, bRm);
9586 case 1: return FNIEMOP_CALL_1(iemOp_fimul_m16i, bRm);
9587 case 2: return FNIEMOP_CALL_1(iemOp_ficom_m16i, bRm);
9588 case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m16i, bRm);
9589 case 4: return FNIEMOP_CALL_1(iemOp_fisub_m16i, bRm);
9590 case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m16i, bRm);
9591 case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m16i, bRm);
9592 case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m16i, bRm);
9593 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9594 }
9595 }
9596}
9597
9598
9599/** Opcode 0xdf 11/0.
9600 * Undocumented instruction, assumed to work like ffree + fincstp. */
9601FNIEMOP_DEF_1(iemOp_ffreep_stN, uint8_t, bRm)
9602{
9603 IEMOP_MNEMONIC(ffreep_stN, "ffreep stN");
9604 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9605
9606 IEM_MC_BEGIN(0, 0);
9607
9608 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9609 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9610
9611 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
 /* Free the register, then pop by incrementing TOP. */
9612 IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
9613 IEM_MC_FPU_STACK_INC_TOP();
9614 IEM_MC_UPDATE_FPU_OPCODE_IP();
9615
9616 IEM_MC_ADVANCE_RIP();
9617 IEM_MC_END();
9618 return VINF_SUCCESS;
9619}
9620
9621
9622/** Opcode 0xdf 0xe0. FNSTSW AX: copy the FPU status word into AX (no-wait form). */
9623FNIEMOP_DEF(iemOp_fnstsw_ax)
9624{
9625 IEMOP_MNEMONIC(fnstsw_ax, "fnstsw ax");
9626 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9627
9628 IEM_MC_BEGIN(0, 1);
9629 IEM_MC_LOCAL(uint16_t, u16Tmp);
9630 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9631 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
9632 IEM_MC_FETCH_FSW(u16Tmp);
9633 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
9634 IEM_MC_ADVANCE_RIP();
9635 IEM_MC_END();
9636 return VINF_SUCCESS;
9637}
9638
9639
9640/** Opcode 0xdf 11/5. FUCOMIP ST(0),ST(i): unordered compare into EFLAGS, then pop. */
9641FNIEMOP_DEF_1(iemOp_fucomip_st0_stN, uint8_t, bRm)
9642{
9643 IEMOP_MNEMONIC(fucomip_st0_stN, "fucomip st0,stN");
 /* NOTE(review): shares the fcomi worker with FCOMIP below; FUCOMI(P) differs
    from FCOMI(P) only in #IA behavior on QNaN operands - confirm the worker
    covers the unordered semantics. */
9644 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
9645}
9646
9647
9648/** Opcode 0xdf 11/6. FCOMIP ST(0),ST(i): ordered compare into EFLAGS, then pop. */
9649FNIEMOP_DEF_1(iemOp_fcomip_st0_stN, uint8_t, bRm)
9650{
9651 IEMOP_MNEMONIC(fcomip_st0_stN, "fcomip st0,stN");
9652 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
9653}
9654
9655
9656/** Opcode 0xdf !11/0. FILD m16i: convert a 16-bit signed integer from memory
 * to R80 and push it onto the FPU stack. */
9657FNIEMOP_DEF_1(iemOp_fild_m16i, uint8_t, bRm)
9658{
9659 IEMOP_MNEMONIC(fild_m16i, "fild m16i");
9660
9661 IEM_MC_BEGIN(2, 3);
9662 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
9663 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
9664 IEM_MC_LOCAL(int16_t, i16Val);
9665 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
9666 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val, i16Val, 1);
9667
9668 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9669 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9670
9671 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9672 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9673 IEM_MC_FETCH_MEM_I16(i16Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9674
9675 IEM_MC_PREPARE_FPU_USAGE();
 /* The push target is relative register 7 (the slot below the current top);
    if it's occupied this is a stack (push) overflow. */
9676 IEM_MC_IF_FPUREG_IS_EMPTY(7)
9677 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i16_to_r80, pFpuRes, pi16Val);
9678 IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9679 IEM_MC_ELSE()
9680 IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9681 IEM_MC_ENDIF();
9682 IEM_MC_ADVANCE_RIP();
9683
9684 IEM_MC_END();
9685 return VINF_SUCCESS;
9686}
9687
9688
9689/** Opcode 0xdf !11/1. FISTTP m16i: store ST(0) to memory as int16 using
 * truncation (round-toward-zero), then pop. */
9690FNIEMOP_DEF_1(iemOp_fisttp_m16i, uint8_t, bRm)
9691{
9692 IEMOP_MNEMONIC(fisttp_m16i, "fisttp m16i");
9693 IEM_MC_BEGIN(3, 2);
9694 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9695 IEM_MC_LOCAL(uint16_t, u16Fsw);
9696 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
9697 IEM_MC_ARG(int16_t *, pi16Dst, 1);
9698 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
9699
9700 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9701 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9702 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9703 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9704
 /* Map the destination writable up front so memory faults precede FPU work. */
9705 IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
9706 IEM_MC_PREPARE_FPU_USAGE();
9707 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
9708 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
9709 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
9710 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9711 IEM_MC_ELSE()
 /* Empty ST(0): with #IA masked the integer-indefinite value is stored. */
9712 IEM_MC_IF_FCW_IM()
9713 IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
9714 IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
9715 IEM_MC_ENDIF();
9716 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9717 IEM_MC_ENDIF();
9718 IEM_MC_ADVANCE_RIP();
9719
9720 IEM_MC_END();
9721 return VINF_SUCCESS;
9722}
9723
9724
9725/** Opcode 0xdf !11/2. FIST m16i: store ST(0) to memory as int16 using the
 * current rounding mode, no pop. */
9726FNIEMOP_DEF_1(iemOp_fist_m16i, uint8_t, bRm)
9727{
9728 IEMOP_MNEMONIC(fist_m16i, "fist m16i");
9729 IEM_MC_BEGIN(3, 2);
9730 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9731 IEM_MC_LOCAL(uint16_t, u16Fsw);
9732 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
9733 IEM_MC_ARG(int16_t *, pi16Dst, 1);
9734 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
9735
9736 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9737 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9738 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9739 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9740
9741 IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
9742 IEM_MC_PREPARE_FPU_USAGE();
9743 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
9744 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
9745 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
9746 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9747 IEM_MC_ELSE()
 /* Empty ST(0): with #IA masked the integer-indefinite value is stored. */
9748 IEM_MC_IF_FCW_IM()
9749 IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
9750 IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
9751 IEM_MC_ENDIF();
9752 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9753 IEM_MC_ENDIF();
9754 IEM_MC_ADVANCE_RIP();
9755
9756 IEM_MC_END();
9757 return VINF_SUCCESS;
9758}
9759
9760
9761/** Opcode 0xdf !11/3. FISTP m16i: like FIST m16i but pops ST(0) afterwards. */
9762FNIEMOP_DEF_1(iemOp_fistp_m16i, uint8_t, bRm)
9763{
9764 IEMOP_MNEMONIC(fistp_m16i, "fistp m16i");
9765 IEM_MC_BEGIN(3, 2);
9766 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9767 IEM_MC_LOCAL(uint16_t, u16Fsw);
9768 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
9769 IEM_MC_ARG(int16_t *, pi16Dst, 1);
9770 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
9771
9772 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9773 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9774 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9775 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9776
9777 IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
9778 IEM_MC_PREPARE_FPU_USAGE();
9779 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
9780 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
9781 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
9782 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9783 IEM_MC_ELSE()
 /* Empty ST(0): with #IA masked the integer-indefinite value is stored. */
9784 IEM_MC_IF_FCW_IM()
9785 IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
9786 IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
9787 IEM_MC_ENDIF();
9788 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9789 IEM_MC_ENDIF();
9790 IEM_MC_ADVANCE_RIP();
9791
9792 IEM_MC_END();
9793 return VINF_SUCCESS;
9794}
9795
9796
9797/** Opcode 0xdf !11/4. FBLD m80bcd (packed BCD load) - not implemented yet (stub). */
9798FNIEMOP_STUB_1(iemOp_fbld_m80d, uint8_t, bRm);
9799
9800
9801/** Opcode 0xdf !11/5. FILD m64i: convert a 64-bit signed integer from memory
 * to R80 and push it onto the FPU stack. */
9802FNIEMOP_DEF_1(iemOp_fild_m64i, uint8_t, bRm)
9803{
9804 IEMOP_MNEMONIC(fild_m64i, "fild m64i");
9805
9806 IEM_MC_BEGIN(2, 3);
9807 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
9808 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
9809 IEM_MC_LOCAL(int64_t, i64Val);
9810 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
9811 IEM_MC_ARG_LOCAL_REF(int64_t const *, pi64Val, i64Val, 1);
9812
9813 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9814 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9815
9816 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9817 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9818 IEM_MC_FETCH_MEM_I64(i64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9819
9820 IEM_MC_PREPARE_FPU_USAGE();
 /* Push target is relative register 7; occupied means push overflow. */
9821 IEM_MC_IF_FPUREG_IS_EMPTY(7)
9822 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i64_to_r80, pFpuRes, pi64Val);
9823 IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9824 IEM_MC_ELSE()
9825 IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9826 IEM_MC_ENDIF();
9827 IEM_MC_ADVANCE_RIP();
9828
9829 IEM_MC_END();
9830 return VINF_SUCCESS;
9831}
9832
9833
9834/** Opcode 0xdf !11/6. FBSTP m80bcd (packed BCD store + pop) - not implemented yet (stub). */
9835FNIEMOP_STUB_1(iemOp_fbstp_m80d, uint8_t, bRm);
9836
9837
9838/** Opcode 0xdf !11/7. FISTP m64i: store ST(0) to memory as int64 using the
 * current rounding mode, then pop. */
9839FNIEMOP_DEF_1(iemOp_fistp_m64i, uint8_t, bRm)
9840{
9841 IEMOP_MNEMONIC(fistp_m64i, "fistp m64i");
9842 IEM_MC_BEGIN(3, 2);
9843 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9844 IEM_MC_LOCAL(uint16_t, u16Fsw);
9845 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
9846 IEM_MC_ARG(int64_t *, pi64Dst, 1);
9847 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
9848
9849 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9850 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9851 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9852 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9853
9854 IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
9855 IEM_MC_PREPARE_FPU_USAGE();
9856 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
9857 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
9858 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
9859 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9860 IEM_MC_ELSE()
 /* Empty ST(0): with #IA masked the integer-indefinite value is stored. */
9861 IEM_MC_IF_FCW_IM()
9862 IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
9863 IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
9864 IEM_MC_ENDIF();
9865 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9866 IEM_MC_ENDIF();
9867 IEM_MC_ADVANCE_RIP();
9868
9869 IEM_MC_END();
9870 return VINF_SUCCESS;
9871}
9872
9873
9874/**
9875 * @opcode 0xdf
9876 */
9877FNIEMOP_DEF(iemOp_EscF7)
9878{
9879 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9880 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9881 {
9882 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
9883 {
9884 case 0: return FNIEMOP_CALL_1(iemOp_ffreep_stN, bRm); /* ffree + pop afterwards, since forever according to AMD. */
9885 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, behaves like FXCH ST(i) on intel. */
9886 case 2: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
9887 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
9888 case 4: if (bRm == 0xe0)
9889 return FNIEMOP_CALL(iemOp_fnstsw_ax);
9890 return IEMOP_RAISE_INVALID_OPCODE();
9891 case 5: return FNIEMOP_CALL_1(iemOp_fucomip_st0_stN, bRm);
9892 case 6: return FNIEMOP_CALL_1(iemOp_fcomip_st0_stN, bRm);
9893 case 7: return IEMOP_RAISE_INVALID_OPCODE();
9894 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9895 }
9896 }
9897 else
9898 {
9899 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
9900 {
9901 case 0: return FNIEMOP_CALL_1(iemOp_fild_m16i, bRm);
9902 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m16i, bRm);
9903 case 2: return FNIEMOP_CALL_1(iemOp_fist_m16i, bRm);
9904 case 3: return FNIEMOP_CALL_1(iemOp_fistp_m16i, bRm);
9905 case 4: return FNIEMOP_CALL_1(iemOp_fbld_m80d, bRm);
9906 case 5: return FNIEMOP_CALL_1(iemOp_fild_m64i, bRm);
9907 case 6: return FNIEMOP_CALL_1(iemOp_fbstp_m80d, bRm);
9908 case 7: return FNIEMOP_CALL_1(iemOp_fistp_m64i, bRm);
9909 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9910 }
9911 }
9912}
9913
9914
9915/**
9916 * @opcode 0xe0
 *
 * LOOPNE/LOOPNZ Jb: decrement the counter register (CX/ECX/RCX, selected by
 * the effective address size) and take the short branch while the counter is
 * non-zero AND ZF is clear.  The decrement does not affect EFLAGS.
9917 */
9918FNIEMOP_DEF(iemOp_loopne_Jb)
9919{
9920 IEMOP_MNEMONIC(loopne_Jb, "loopne Jb");
9921 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
9922 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9923 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9924
9925 switch (pVCpu->iem.s.enmEffAddrMode)
9926 {
9927 case IEMMODE_16BIT:
9928 IEM_MC_BEGIN(0,0);
9929 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
9930 IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
9931 IEM_MC_REL_JMP_S8(i8Imm);
9932 } IEM_MC_ELSE() {
9933 IEM_MC_ADVANCE_RIP();
9934 } IEM_MC_ENDIF();
9935 IEM_MC_END();
9936 return VINF_SUCCESS;
9937
9938 case IEMMODE_32BIT:
9939 IEM_MC_BEGIN(0,0);
9940 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
9941 IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
9942 IEM_MC_REL_JMP_S8(i8Imm);
9943 } IEM_MC_ELSE() {
9944 IEM_MC_ADVANCE_RIP();
9945 } IEM_MC_ENDIF();
9946 IEM_MC_END();
9947 return VINF_SUCCESS;
9948
9949 case IEMMODE_64BIT:
9950 IEM_MC_BEGIN(0,0);
9951 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
9952 IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
9953 IEM_MC_REL_JMP_S8(i8Imm);
9954 } IEM_MC_ELSE() {
9955 IEM_MC_ADVANCE_RIP();
9956 } IEM_MC_ENDIF();
9957 IEM_MC_END();
9958 return VINF_SUCCESS;
9959
9960 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9961 }
9962}
9963
9964
9965/**
9966 * @opcode 0xe1
 *
 * LOOPE/LOOPZ Jb: decrement the counter register (CX/ECX/RCX, selected by the
 * effective address size) and take the short branch while the counter is
 * non-zero AND ZF is set.  The decrement does not affect EFLAGS.
9967 */
9968FNIEMOP_DEF(iemOp_loope_Jb)
9969{
9970 IEMOP_MNEMONIC(loope_Jb, "loope Jb");
9971 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
9972 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9973 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9974
9975 switch (pVCpu->iem.s.enmEffAddrMode)
9976 {
9977 case IEMMODE_16BIT:
9978 IEM_MC_BEGIN(0,0);
9979 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
9980 IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
9981 IEM_MC_REL_JMP_S8(i8Imm);
9982 } IEM_MC_ELSE() {
9983 IEM_MC_ADVANCE_RIP();
9984 } IEM_MC_ENDIF();
9985 IEM_MC_END();
9986 return VINF_SUCCESS;
9987
9988 case IEMMODE_32BIT:
9989 IEM_MC_BEGIN(0,0);
9990 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
9991 IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
9992 IEM_MC_REL_JMP_S8(i8Imm);
9993 } IEM_MC_ELSE() {
9994 IEM_MC_ADVANCE_RIP();
9995 } IEM_MC_ENDIF();
9996 IEM_MC_END();
9997 return VINF_SUCCESS;
9998
9999 case IEMMODE_64BIT:
10000 IEM_MC_BEGIN(0,0);
10001 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
10002 IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
10003 IEM_MC_REL_JMP_S8(i8Imm);
10004 } IEM_MC_ELSE() {
10005 IEM_MC_ADVANCE_RIP();
10006 } IEM_MC_ENDIF();
10007 IEM_MC_END();
10008 return VINF_SUCCESS;
10009
10010 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10011 }
10012}
10013
10014
10015/**
10016 * @opcode 0xe2
 *
 * LOOP Jb: decrement the counter register (CX/ECX/RCX, selected by the
 * effective address size) and take the short branch while it is non-zero.
10017 */
10018FNIEMOP_DEF(iemOp_loop_Jb)
10019{
10020 IEMOP_MNEMONIC(loop_Jb, "loop Jb");
10021 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10022 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10023 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10024
10025 /** @todo Check out the #GP case if EIP < CS.Base or EIP > CS.Limit when
10026 * using the 32-bit operand size override. How can that be restarted? See
10027 * weird pseudo code in intel manual. */
 /* Special case below: when the displacement equals minus the instruction
    length, the LOOP jumps to itself and would merely spin the counter down
    to zero - emulate the final state directly (counter = 0, fall through)
    instead of iterating. */
10028 switch (pVCpu->iem.s.enmEffAddrMode)
10029 {
10030 case IEMMODE_16BIT:
10031 IEM_MC_BEGIN(0,0);
10032 if (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) != i8Imm)
10033 {
10034 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
10035 IEM_MC_IF_CX_IS_NZ() {
10036 IEM_MC_REL_JMP_S8(i8Imm);
10037 } IEM_MC_ELSE() {
10038 IEM_MC_ADVANCE_RIP();
10039 } IEM_MC_ENDIF();
10040 }
10041 else
10042 {
10043 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
10044 IEM_MC_ADVANCE_RIP();
10045 }
10046 IEM_MC_END();
10047 return VINF_SUCCESS;
10048
10049 case IEMMODE_32BIT:
10050 IEM_MC_BEGIN(0,0);
10051 if (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) != i8Imm)
10052 {
10053 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
10054 IEM_MC_IF_ECX_IS_NZ() {
10055 IEM_MC_REL_JMP_S8(i8Imm);
10056 } IEM_MC_ELSE() {
10057 IEM_MC_ADVANCE_RIP();
10058 } IEM_MC_ENDIF();
10059 }
10060 else
10061 {
10062 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
10063 IEM_MC_ADVANCE_RIP();
10064 }
10065 IEM_MC_END();
10066 return VINF_SUCCESS;
10067
10068 case IEMMODE_64BIT:
10069 IEM_MC_BEGIN(0,0);
10070 if (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) != i8Imm)
10071 {
10072 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
10073 IEM_MC_IF_RCX_IS_NZ() {
10074 IEM_MC_REL_JMP_S8(i8Imm);
10075 } IEM_MC_ELSE() {
10076 IEM_MC_ADVANCE_RIP();
10077 } IEM_MC_ENDIF();
10078 }
10079 else
10080 {
10081 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
10082 IEM_MC_ADVANCE_RIP();
10083 }
10084 IEM_MC_END();
10085 return VINF_SUCCESS;
10086
10087 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10088 }
10089}
10090
10091
10092/**
10093 * @opcode 0xe3
 *
 * JCXZ/JECXZ/JRCXZ Jb: short branch if the counter register (selected by the
 * effective address size) is zero.  The counter is not modified.
10094 */
10095FNIEMOP_DEF(iemOp_jecxz_Jb)
10096{
10097 IEMOP_MNEMONIC(jecxz_Jb, "jecxz Jb");
10098 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10099 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10100 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10101
10102 switch (pVCpu->iem.s.enmEffAddrMode)
10103 {
10104 case IEMMODE_16BIT:
10105 IEM_MC_BEGIN(0,0);
10106 IEM_MC_IF_CX_IS_NZ() {
10107 IEM_MC_ADVANCE_RIP();
10108 } IEM_MC_ELSE() {
10109 IEM_MC_REL_JMP_S8(i8Imm);
10110 } IEM_MC_ENDIF();
10111 IEM_MC_END();
10112 return VINF_SUCCESS;
10113
10114 case IEMMODE_32BIT:
10115 IEM_MC_BEGIN(0,0);
10116 IEM_MC_IF_ECX_IS_NZ() {
10117 IEM_MC_ADVANCE_RIP();
10118 } IEM_MC_ELSE() {
10119 IEM_MC_REL_JMP_S8(i8Imm);
10120 } IEM_MC_ENDIF();
10121 IEM_MC_END();
10122 return VINF_SUCCESS;
10123
10124 case IEMMODE_64BIT:
10125 IEM_MC_BEGIN(0,0);
10126 IEM_MC_IF_RCX_IS_NZ() {
10127 IEM_MC_ADVANCE_RIP();
10128 } IEM_MC_ELSE() {
10129 IEM_MC_REL_JMP_S8(i8Imm);
10130 } IEM_MC_ENDIF();
10131 IEM_MC_END();
10132 return VINF_SUCCESS;
10133
10134 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10135 }
10136}
10137
10138
10139/** Opcode 0xe4. IN AL,Ib: read one byte from the immediate I/O port into AL. */
10140FNIEMOP_DEF(iemOp_in_AL_Ib)
10141{
10142 IEMOP_MNEMONIC(in_AL_Ib, "in AL,Ib");
10143 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
10144 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10145 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, 1);
10146}
10147
10148
10149/** Opcode 0xe5. IN eAX,Ib: read 2 or 4 bytes (per operand size) from the immediate I/O port. */
10150FNIEMOP_DEF(iemOp_in_eAX_Ib)
10151{
10152 IEMOP_MNEMONIC(in_eAX_Ib, "in eAX,Ib");
10153 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
10154 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10155 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
10156}
10157
10158
10159/** Opcode 0xe6. OUT Ib,AL: write AL to the immediate I/O port. */
10160FNIEMOP_DEF(iemOp_out_Ib_AL)
10161{
10162 IEMOP_MNEMONIC(out_Ib_AL, "out Ib,AL");
10163 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
10164 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10165 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, 1);
10166}
10167
10168
10169/** Opcode 0xe7. OUT Ib,eAX: write 2 or 4 bytes (per operand size) to the immediate I/O port. */
10170FNIEMOP_DEF(iemOp_out_Ib_eAX)
10171{
10172 IEMOP_MNEMONIC(out_Ib_eAX, "out Ib,eAX");
10173 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
10174 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10175 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
10176}
10177
10178
10179/**
10180 * @opcode 0xe8
 *
 * CALL rel16/rel32: near relative call.  In 64-bit mode the default operand
 * size is forced to 64 bits and the immediate is a sign-extended rel32.
10181 */
10182FNIEMOP_DEF(iemOp_call_Jv)
10183{
10184 IEMOP_MNEMONIC(call_Jv, "call Jv");
10185 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10186 switch (pVCpu->iem.s.enmEffOpSize)
10187 {
10188 case IEMMODE_16BIT:
10189 {
10190 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
10191 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_16, (int16_t)u16Imm);
10192 }
10193
10194 case IEMMODE_32BIT:
10195 {
10196 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
10197 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_32, (int32_t)u32Imm);
10198 }
10199
10200 case IEMMODE_64BIT:
10201 {
10202 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
10203 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_64, u64Imm);
10204 }
10205
10206 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10207 }
10208}
10209
10210
10211/**
10212 * @opcode 0xe9
 *
 * JMP rel16/rel32: near relative jump.  The 64-bit case shares the 32-bit
 * path since the immediate stays a sign-extended rel32.
10213 */
10214FNIEMOP_DEF(iemOp_jmp_Jv)
10215{
10216 IEMOP_MNEMONIC(jmp_Jv, "jmp Jv");
10217 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10218 switch (pVCpu->iem.s.enmEffOpSize)
10219 {
10220 case IEMMODE_16BIT:
10221 {
10222 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
10223 IEM_MC_BEGIN(0, 0);
10224 IEM_MC_REL_JMP_S16(i16Imm);
10225 IEM_MC_END();
10226 return VINF_SUCCESS;
10227 }
10228
10229 case IEMMODE_64BIT:
10230 case IEMMODE_32BIT:
10231 {
10232 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
10233 IEM_MC_BEGIN(0, 0);
10234 IEM_MC_REL_JMP_S32(i32Imm);
10235 IEM_MC_END();
10236 return VINF_SUCCESS;
10237 }
10238
10239 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10240 }
10241}
10242
10243
10244/**
10245 * @opcode 0xea
 *
 * JMP ptr16:16/ptr16:32: direct far jump with an immediate selector:offset.
 * Invalid in 64-bit mode.
10246 */
10247FNIEMOP_DEF(iemOp_jmp_Ap)
10248{
10249 IEMOP_MNEMONIC(jmp_Ap, "jmp Ap");
10250 IEMOP_HLP_NO_64BIT();
10251
10252 /* Decode the far pointer address and pass it on to the far call C implementation. */
10253 uint32_t offSeg;
10254 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
10255 IEM_OPCODE_GET_NEXT_U32(&offSeg);
10256 else
10257 IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
10258 uint16_t uSel; IEM_OPCODE_GET_NEXT_U16(&uSel);
10259 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10260 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_FarJmp, uSel, offSeg, pVCpu->iem.s.enmEffOpSize);
10261}
10262
10263
10264/**
10265 * @opcode 0xeb
10266 */
10267FNIEMOP_DEF(iemOp_jmp_Jb)
10268{
10269 IEMOP_MNEMONIC(jmp_Jb, "jmp Jb");
10270 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10271 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10272 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10273
10274 IEM_MC_BEGIN(0, 0);
10275 IEM_MC_REL_JMP_S8(i8Imm);
10276 IEM_MC_END();
10277 return VINF_SUCCESS;
10278}
10279
10280
/** Opcode 0xec - in AL,DX: read one byte from the I/O port in DX into AL. */
FNIEMOP_DEF(iemOp_in_AL_DX)
{
    IEMOP_MNEMONIC(in_AL_DX, "in AL,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, 1); /* 1 = access width in bytes */
}
10288
10289
/** Opcode 0xed - in eAX,DX: read a word/dword from the I/O port in DX.
 * @note The function name is missing the 'in_' part (should arguably be
 *       iemOp_in_eAX_DX like the mnemonic); kept as-is since the one byte
 *       opcode table references it by this name. */
FNIEMOP_DEF(iemOp_eAX_DX)
{
    IEMOP_MNEMONIC(in_eAX_DX, "in eAX,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Access width in bytes: 2 for 16-bit operand size, else 4 (64-bit uses 4 too). */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
10297
10298
/** Opcode 0xee - out DX,AL: write AL to the I/O port in DX. */
FNIEMOP_DEF(iemOp_out_DX_AL)
{
    IEMOP_MNEMONIC(out_DX_AL, "out DX,AL");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, 1); /* 1 = access width in bytes */
}
10306
10307
/** Opcode 0xef - out DX,eAX: write AX/EAX to the I/O port in DX. */
FNIEMOP_DEF(iemOp_out_DX_eAX)
{
    IEMOP_MNEMONIC(out_DX_eAX, "out DX,eAX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Access width in bytes: 2 for 16-bit operand size, else 4 (64-bit uses 4 too). */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
10315
10316
10317/**
10318 * @opcode 0xf0
10319 */
10320FNIEMOP_DEF(iemOp_lock)
10321{
10322 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("lock");
10323 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_LOCK;
10324
10325 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
10326 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
10327}
10328
10329
10330/**
10331 * @opcode 0xf1
10332 */
10333FNIEMOP_DEF(iemOp_int1)
10334{
10335 IEMOP_MNEMONIC(int1, "int1"); /* icebp */
10336 IEMOP_HLP_MIN_386(); /** @todo does not generate #UD on 286, or so they say... */
10337 /** @todo testcase! */
10338 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_DB, false /*fIsBpInstr*/);
10339}
10340
10341
10342/**
10343 * @opcode 0xf2
10344 */
10345FNIEMOP_DEF(iemOp_repne)
10346{
10347 /* This overrides any previous REPE prefix. */
10348 pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPZ;
10349 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repne");
10350 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPNZ;
10351
10352 /* For the 4 entry opcode tables, REPNZ overrides any previous
10353 REPZ and operand size prefixes. */
10354 pVCpu->iem.s.idxPrefix = 3;
10355
10356 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
10357 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
10358}
10359
10360
10361/**
10362 * @opcode 0xf3
10363 */
10364FNIEMOP_DEF(iemOp_repe)
10365{
10366 /* This overrides any previous REPNE prefix. */
10367 pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPNZ;
10368 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repe");
10369 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPZ;
10370
10371 /* For the 4 entry opcode tables, REPNZ overrides any previous
10372 REPNZ and operand size prefixes. */
10373 pVCpu->iem.s.idxPrefix = 2;
10374
10375 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
10376 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
10377}
10378
10379
10380/**
10381 * @opcode 0xf4
10382 */
10383FNIEMOP_DEF(iemOp_hlt)
10384{
10385 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10386 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_hlt);
10387}
10388
10389
10390/**
10391 * @opcode 0xf5
10392 */
10393FNIEMOP_DEF(iemOp_cmc)
10394{
10395 IEMOP_MNEMONIC(cmc, "cmc");
10396 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10397 IEM_MC_BEGIN(0, 0);
10398 IEM_MC_FLIP_EFL_BIT(X86_EFL_CF);
10399 IEM_MC_ADVANCE_RIP();
10400 IEM_MC_END();
10401 return VINF_SUCCESS;
10402}
10403
10404
10405/**
10406 * Common implementation of 'inc/dec/not/neg Eb'.
10407 *
10408 * @param bRm The RM byte.
10409 * @param pImpl The instruction implementation.
10410 */
10411FNIEMOP_DEF_2(iemOpCommonUnaryEb, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
10412{
10413 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10414 {
10415 /* register access */
10416 IEM_MC_BEGIN(2, 0);
10417 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
10418 IEM_MC_ARG(uint32_t *, pEFlags, 1);
10419 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10420 IEM_MC_REF_EFLAGS(pEFlags);
10421 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
10422 IEM_MC_ADVANCE_RIP();
10423 IEM_MC_END();
10424 }
10425 else
10426 {
10427 /* memory access. */
10428 IEM_MC_BEGIN(2, 2);
10429 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
10430 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
10431 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10432
10433 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10434 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10435 IEM_MC_FETCH_EFLAGS(EFlags);
10436 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10437 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
10438 else
10439 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU8, pu8Dst, pEFlags);
10440
10441 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
10442 IEM_MC_COMMIT_EFLAGS(EFlags);
10443 IEM_MC_ADVANCE_RIP();
10444 IEM_MC_END();
10445 }
10446 return VINF_SUCCESS;
10447}
10448
10449
10450/**
10451 * Common implementation of 'inc/dec/not/neg Ev'.
10452 *
10453 * @param bRm The RM byte.
10454 * @param pImpl The instruction implementation.
10455 */
10456FNIEMOP_DEF_2(iemOpCommonUnaryEv, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
10457{
10458 /* Registers are handled by a common worker. */
10459 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10460 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, pImpl, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10461
10462 /* Memory we do here. */
10463 switch (pVCpu->iem.s.enmEffOpSize)
10464 {
10465 case IEMMODE_16BIT:
10466 IEM_MC_BEGIN(2, 2);
10467 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10468 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
10469 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10470
10471 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10472 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10473 IEM_MC_FETCH_EFLAGS(EFlags);
10474 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10475 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
10476 else
10477 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU16, pu16Dst, pEFlags);
10478
10479 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
10480 IEM_MC_COMMIT_EFLAGS(EFlags);
10481 IEM_MC_ADVANCE_RIP();
10482 IEM_MC_END();
10483 return VINF_SUCCESS;
10484
10485 case IEMMODE_32BIT:
10486 IEM_MC_BEGIN(2, 2);
10487 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10488 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
10489 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10490
10491 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10492 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10493 IEM_MC_FETCH_EFLAGS(EFlags);
10494 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10495 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
10496 else
10497 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU32, pu32Dst, pEFlags);
10498
10499 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
10500 IEM_MC_COMMIT_EFLAGS(EFlags);
10501 IEM_MC_ADVANCE_RIP();
10502 IEM_MC_END();
10503 return VINF_SUCCESS;
10504
10505 case IEMMODE_64BIT:
10506 IEM_MC_BEGIN(2, 2);
10507 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10508 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
10509 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10510
10511 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10512 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10513 IEM_MC_FETCH_EFLAGS(EFlags);
10514 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10515 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
10516 else
10517 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU64, pu64Dst, pEFlags);
10518
10519 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
10520 IEM_MC_COMMIT_EFLAGS(EFlags);
10521 IEM_MC_ADVANCE_RIP();
10522 IEM_MC_END();
10523 return VINF_SUCCESS;
10524
10525 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10526 }
10527}
10528
10529
/** Opcode 0xf6 /0 - test Eb,Ib.  Note that the memory operand is only
 *  mapped for reading since TEST never writes its destination. */
FNIEMOP_DEF_1(iemOp_grp3_test_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC(test_Eb_Ib, "test Eb,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after TEST */

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/u8Imm, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, u8Src, 1);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = immediate byte follows the ModR/M bytes */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_ASSIGN(u8Src, u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_R);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
10576
10577
/** Opcode 0xf7 /0 - test Ev,Iv.  The 64-bit form uses a sign-extended
 *  32-bit immediate; the memory operand is mapped read-only since TEST
 *  never writes its destination. */
FNIEMOP_DEF_1(iemOp_grp3_test_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(test_Ev_Iv, "test Ev,Iv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after TEST */

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/u16Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/u32Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
                /* No clearing the high dword here - test doesn't write back the result. */
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); /* imm32 sign extended to 64-bit */
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/u64Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); /* 2 = immediate word follows */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* 4 = immediate dword follows */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* 4 = imm32 (sign extended below) follows */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
10717
10718
/** Opcode 0xf6 /4, /5, /6 and /7.
 *
 * Common worker for the byte forms of mul, imul, div and idiv.  The
 * implementation routine operates on AX and returns zero on success or
 * non-zero to raise \#DE (divide error).
 *
 * @param   bRm     The RM byte.
 * @param   pfnU8   The 8-bit multiply/divide implementation routine.
 */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEb, uint8_t, bRm, PFNIEMAIMPLMULDIVU8, pfnU8)
{
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_ARG(uint16_t *, pu16AX, 0);
        IEM_MC_ARG(uint8_t, u8Value, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(int32_t, rc);

        IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR(); /* non-zero rc means #DE */
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *, pu16AX, 0);
        IEM_MC_ARG(uint8_t, u8Value, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(int32_t, rc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR(); /* non-zero rc means #DE */
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
10770
10771
10772/** Opcode 0xf7 /4, /5, /6 and /7. */
10773FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEv, uint8_t, bRm, PCIEMOPMULDIVSIZES, pImpl)
10774{
10775 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
10776
10777 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10778 {
10779 /* register access */
10780 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10781 switch (pVCpu->iem.s.enmEffOpSize)
10782 {
10783 case IEMMODE_16BIT:
10784 {
10785 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10786 IEM_MC_BEGIN(4, 1);
10787 IEM_MC_ARG(uint16_t *, pu16AX, 0);
10788 IEM_MC_ARG(uint16_t *, pu16DX, 1);
10789 IEM_MC_ARG(uint16_t, u16Value, 2);
10790 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10791 IEM_MC_LOCAL(int32_t, rc);
10792
10793 IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10794 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
10795 IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
10796 IEM_MC_REF_EFLAGS(pEFlags);
10797 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
10798 IEM_MC_IF_LOCAL_IS_Z(rc) {
10799 IEM_MC_ADVANCE_RIP();
10800 } IEM_MC_ELSE() {
10801 IEM_MC_RAISE_DIVIDE_ERROR();
10802 } IEM_MC_ENDIF();
10803
10804 IEM_MC_END();
10805 return VINF_SUCCESS;
10806 }
10807
10808 case IEMMODE_32BIT:
10809 {
10810 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10811 IEM_MC_BEGIN(4, 1);
10812 IEM_MC_ARG(uint32_t *, pu32AX, 0);
10813 IEM_MC_ARG(uint32_t *, pu32DX, 1);
10814 IEM_MC_ARG(uint32_t, u32Value, 2);
10815 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10816 IEM_MC_LOCAL(int32_t, rc);
10817
10818 IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10819 IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
10820 IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
10821 IEM_MC_REF_EFLAGS(pEFlags);
10822 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
10823 IEM_MC_IF_LOCAL_IS_Z(rc) {
10824 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
10825 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
10826 IEM_MC_ADVANCE_RIP();
10827 } IEM_MC_ELSE() {
10828 IEM_MC_RAISE_DIVIDE_ERROR();
10829 } IEM_MC_ENDIF();
10830
10831 IEM_MC_END();
10832 return VINF_SUCCESS;
10833 }
10834
10835 case IEMMODE_64BIT:
10836 {
10837 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10838 IEM_MC_BEGIN(4, 1);
10839 IEM_MC_ARG(uint64_t *, pu64AX, 0);
10840 IEM_MC_ARG(uint64_t *, pu64DX, 1);
10841 IEM_MC_ARG(uint64_t, u64Value, 2);
10842 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10843 IEM_MC_LOCAL(int32_t, rc);
10844
10845 IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10846 IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
10847 IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
10848 IEM_MC_REF_EFLAGS(pEFlags);
10849 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
10850 IEM_MC_IF_LOCAL_IS_Z(rc) {
10851 IEM_MC_ADVANCE_RIP();
10852 } IEM_MC_ELSE() {
10853 IEM_MC_RAISE_DIVIDE_ERROR();
10854 } IEM_MC_ENDIF();
10855
10856 IEM_MC_END();
10857 return VINF_SUCCESS;
10858 }
10859
10860 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10861 }
10862 }
10863 else
10864 {
10865 /* memory access. */
10866 switch (pVCpu->iem.s.enmEffOpSize)
10867 {
10868 case IEMMODE_16BIT:
10869 {
10870 IEM_MC_BEGIN(4, 2);
10871 IEM_MC_ARG(uint16_t *, pu16AX, 0);
10872 IEM_MC_ARG(uint16_t *, pu16DX, 1);
10873 IEM_MC_ARG(uint16_t, u16Value, 2);
10874 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10875 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10876 IEM_MC_LOCAL(int32_t, rc);
10877
10878 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10879 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10880 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10881 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
10882 IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
10883 IEM_MC_REF_EFLAGS(pEFlags);
10884 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
10885 IEM_MC_IF_LOCAL_IS_Z(rc) {
10886 IEM_MC_ADVANCE_RIP();
10887 } IEM_MC_ELSE() {
10888 IEM_MC_RAISE_DIVIDE_ERROR();
10889 } IEM_MC_ENDIF();
10890
10891 IEM_MC_END();
10892 return VINF_SUCCESS;
10893 }
10894
10895 case IEMMODE_32BIT:
10896 {
10897 IEM_MC_BEGIN(4, 2);
10898 IEM_MC_ARG(uint32_t *, pu32AX, 0);
10899 IEM_MC_ARG(uint32_t *, pu32DX, 1);
10900 IEM_MC_ARG(uint32_t, u32Value, 2);
10901 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10902 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10903 IEM_MC_LOCAL(int32_t, rc);
10904
10905 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10906 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10907 IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10908 IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
10909 IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
10910 IEM_MC_REF_EFLAGS(pEFlags);
10911 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
10912 IEM_MC_IF_LOCAL_IS_Z(rc) {
10913 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
10914 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
10915 IEM_MC_ADVANCE_RIP();
10916 } IEM_MC_ELSE() {
10917 IEM_MC_RAISE_DIVIDE_ERROR();
10918 } IEM_MC_ENDIF();
10919
10920 IEM_MC_END();
10921 return VINF_SUCCESS;
10922 }
10923
10924 case IEMMODE_64BIT:
10925 {
10926 IEM_MC_BEGIN(4, 2);
10927 IEM_MC_ARG(uint64_t *, pu64AX, 0);
10928 IEM_MC_ARG(uint64_t *, pu64DX, 1);
10929 IEM_MC_ARG(uint64_t, u64Value, 2);
10930 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10931 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10932 IEM_MC_LOCAL(int32_t, rc);
10933
10934 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10935 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10936 IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10937 IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
10938 IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
10939 IEM_MC_REF_EFLAGS(pEFlags);
10940 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
10941 IEM_MC_IF_LOCAL_IS_Z(rc) {
10942 IEM_MC_ADVANCE_RIP();
10943 } IEM_MC_ELSE() {
10944 IEM_MC_RAISE_DIVIDE_ERROR();
10945 } IEM_MC_ENDIF();
10946
10947 IEM_MC_END();
10948 return VINF_SUCCESS;
10949 }
10950
10951 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10952 }
10953 }
10954}
10955
10956/**
10957 * @opcode 0xf6
10958 */
10959FNIEMOP_DEF(iemOp_Grp3_Eb)
10960{
10961 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10962 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
10963 {
10964 case 0:
10965 return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
10966 case 1:
10967/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
10968 return IEMOP_RAISE_INVALID_OPCODE();
10969 case 2:
10970 IEMOP_MNEMONIC(not_Eb, "not Eb");
10971 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_not);
10972 case 3:
10973 IEMOP_MNEMONIC(neg_Eb, "neg Eb");
10974 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_neg);
10975 case 4:
10976 IEMOP_MNEMONIC(mul_Eb, "mul Eb");
10977 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
10978 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_mul_u8);
10979 case 5:
10980 IEMOP_MNEMONIC(imul_Eb, "imul Eb");
10981 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
10982 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_imul_u8);
10983 case 6:
10984 IEMOP_MNEMONIC(div_Eb, "div Eb");
10985 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
10986 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_div_u8);
10987 case 7:
10988 IEMOP_MNEMONIC(idiv_Eb, "idiv Eb");
10989 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
10990 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_idiv_u8);
10991 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10992 }
10993}
10994
10995
10996/**
10997 * @opcode 0xf7
10998 */
10999FNIEMOP_DEF(iemOp_Grp3_Ev)
11000{
11001 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11002 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
11003 {
11004 case 0:
11005 return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
11006 case 1:
11007/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
11008 return IEMOP_RAISE_INVALID_OPCODE();
11009 case 2:
11010 IEMOP_MNEMONIC(not_Ev, "not Ev");
11011 return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_not);
11012 case 3:
11013 IEMOP_MNEMONIC(neg_Ev, "neg Ev");
11014 return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_neg);
11015 case 4:
11016 IEMOP_MNEMONIC(mul_Ev, "mul Ev");
11017 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
11018 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_mul);
11019 case 5:
11020 IEMOP_MNEMONIC(imul_Ev, "imul Ev");
11021 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
11022 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_imul);
11023 case 6:
11024 IEMOP_MNEMONIC(div_Ev, "div Ev");
11025 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
11026 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_div);
11027 case 7:
11028 IEMOP_MNEMONIC(idiv_Ev, "idiv Ev");
11029 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
11030 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_idiv);
11031 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11032 }
11033}
11034
11035
11036/**
11037 * @opcode 0xf8
11038 */
11039FNIEMOP_DEF(iemOp_clc)
11040{
11041 IEMOP_MNEMONIC(clc, "clc");
11042 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11043 IEM_MC_BEGIN(0, 0);
11044 IEM_MC_CLEAR_EFL_BIT(X86_EFL_CF);
11045 IEM_MC_ADVANCE_RIP();
11046 IEM_MC_END();
11047 return VINF_SUCCESS;
11048}
11049
11050
11051/**
11052 * @opcode 0xf9
11053 */
11054FNIEMOP_DEF(iemOp_stc)
11055{
11056 IEMOP_MNEMONIC(stc, "stc");
11057 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11058 IEM_MC_BEGIN(0, 0);
11059 IEM_MC_SET_EFL_BIT(X86_EFL_CF);
11060 IEM_MC_ADVANCE_RIP();
11061 IEM_MC_END();
11062 return VINF_SUCCESS;
11063}
11064
11065
11066/**
11067 * @opcode 0xfa
11068 */
11069FNIEMOP_DEF(iemOp_cli)
11070{
11071 IEMOP_MNEMONIC(cli, "cli");
11072 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11073 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cli);
11074}
11075
11076
/**
 * @opcode 0xfb
 *
 * STI - deferred to the C implementation (privilege checks and the
 * interrupt shadow handling live there).
 */
FNIEMOP_DEF(iemOp_sti)
{
    IEMOP_MNEMONIC(sti, "sti");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sti);
}
11083
11084
11085/**
11086 * @opcode 0xfc
11087 */
11088FNIEMOP_DEF(iemOp_cld)
11089{
11090 IEMOP_MNEMONIC(cld, "cld");
11091 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11092 IEM_MC_BEGIN(0, 0);
11093 IEM_MC_CLEAR_EFL_BIT(X86_EFL_DF);
11094 IEM_MC_ADVANCE_RIP();
11095 IEM_MC_END();
11096 return VINF_SUCCESS;
11097}
11098
11099
11100/**
11101 * @opcode 0xfd
11102 */
11103FNIEMOP_DEF(iemOp_std)
11104{
11105 IEMOP_MNEMONIC(std, "std");
11106 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11107 IEM_MC_BEGIN(0, 0);
11108 IEM_MC_SET_EFL_BIT(X86_EFL_DF);
11109 IEM_MC_ADVANCE_RIP();
11110 IEM_MC_END();
11111 return VINF_SUCCESS;
11112}
11113
11114
11115/**
11116 * @opcode 0xfe
11117 */
11118FNIEMOP_DEF(iemOp_Grp4)
11119{
11120 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11121 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
11122 {
11123 case 0:
11124 IEMOP_MNEMONIC(inc_Eb, "inc Eb");
11125 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_inc);
11126 case 1:
11127 IEMOP_MNEMONIC(dec_Eb, "dec Eb");
11128 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_dec);
11129 default:
11130 IEMOP_MNEMONIC(grp4_ud, "grp4-ud");
11131 return IEMOP_RAISE_INVALID_OPCODE();
11132 }
11133}
11134
11135
11136/**
11137 * Opcode 0xff /2.
11138 * @param bRm The RM byte.
11139 */
11140FNIEMOP_DEF_1(iemOp_Grp5_calln_Ev, uint8_t, bRm)
11141{
11142 IEMOP_MNEMONIC(calln_Ev, "calln Ev");
11143 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
11144
11145 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
11146 {
11147 /* The new RIP is taken from a register. */
11148 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11149 switch (pVCpu->iem.s.enmEffOpSize)
11150 {
11151 case IEMMODE_16BIT:
11152 IEM_MC_BEGIN(1, 0);
11153 IEM_MC_ARG(uint16_t, u16Target, 0);
11154 IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11155 IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
11156 IEM_MC_END()
11157 return VINF_SUCCESS;
11158
11159 case IEMMODE_32BIT:
11160 IEM_MC_BEGIN(1, 0);
11161 IEM_MC_ARG(uint32_t, u32Target, 0);
11162 IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11163 IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
11164 IEM_MC_END()
11165 return VINF_SUCCESS;
11166
11167 case IEMMODE_64BIT:
11168 IEM_MC_BEGIN(1, 0);
11169 IEM_MC_ARG(uint64_t, u64Target, 0);
11170 IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11171 IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
11172 IEM_MC_END()
11173 return VINF_SUCCESS;
11174
11175 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11176 }
11177 }
11178 else
11179 {
11180 /* The new RIP is taken from a register. */
11181 switch (pVCpu->iem.s.enmEffOpSize)
11182 {
11183 case IEMMODE_16BIT:
11184 IEM_MC_BEGIN(1, 1);
11185 IEM_MC_ARG(uint16_t, u16Target, 0);
11186 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11187 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11188 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11189 IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11190 IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
11191 IEM_MC_END()
11192 return VINF_SUCCESS;
11193
11194 case IEMMODE_32BIT:
11195 IEM_MC_BEGIN(1, 1);
11196 IEM_MC_ARG(uint32_t, u32Target, 0);
11197 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11198 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11199 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11200 IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11201 IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
11202 IEM_MC_END()
11203 return VINF_SUCCESS;
11204
11205 case IEMMODE_64BIT:
11206 IEM_MC_BEGIN(1, 1);
11207 IEM_MC_ARG(uint64_t, u64Target, 0);
11208 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11209 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11210 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11211 IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11212 IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
11213 IEM_MC_END()
11214 return VINF_SUCCESS;
11215
11216 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11217 }
11218 }
11219}
11220
11221typedef IEM_CIMPL_DECL_TYPE_3(FNIEMCIMPLFARBRANCH, uint16_t, uSel, uint64_t, offSeg, IEMMODE, enmOpSize);
11222
11223FNIEMOP_DEF_2(iemOpHlp_Grp5_far_Ep, uint8_t, bRm, FNIEMCIMPLFARBRANCH *, pfnCImpl)
11224{
11225 /* Registers? How?? */
11226 if (RT_LIKELY((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)))
11227 { /* likely */ }
11228 else
11229 return IEMOP_RAISE_INVALID_OPCODE(); /* callf eax is not legal */
11230
11231 /* Far pointer loaded from memory. */
11232 switch (pVCpu->iem.s.enmEffOpSize)
11233 {
11234 case IEMMODE_16BIT:
11235 IEM_MC_BEGIN(3, 1);
11236 IEM_MC_ARG(uint16_t, u16Sel, 0);
11237 IEM_MC_ARG(uint16_t, offSeg, 1);
11238 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
11239 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11240 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11241 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11242 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11243 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2);
11244 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
11245 IEM_MC_END();
11246 return VINF_SUCCESS;
11247
11248 case IEMMODE_64BIT:
11249 /** @todo testcase: AMD does not seem to believe in the case (see bs-cpu-xcpt-1)
11250 * and will apparently ignore REX.W, at least for the jmp far qword [rsp]
11251 * and call far qword [rsp] encodings. */
11252 if (!IEM_IS_GUEST_CPU_AMD(pVCpu))
11253 {
11254 IEM_MC_BEGIN(3, 1);
11255 IEM_MC_ARG(uint16_t, u16Sel, 0);
11256 IEM_MC_ARG(uint64_t, offSeg, 1);
11257 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
11258 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11259 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11260 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11261 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11262 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 8);
11263 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
11264 IEM_MC_END();
11265 return VINF_SUCCESS;
11266 }
11267 /* AMD falls thru. */
11268 /* fall thru */
11269
11270 case IEMMODE_32BIT:
11271 IEM_MC_BEGIN(3, 1);
11272 IEM_MC_ARG(uint16_t, u16Sel, 0);
11273 IEM_MC_ARG(uint32_t, offSeg, 1);
11274 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_32BIT, 2);
11275 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11276 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11277 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11278 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11279 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4);
11280 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
11281 IEM_MC_END();
11282 return VINF_SUCCESS;
11283
11284 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11285 }
11286}
11287
11288
11289/**
11290 * Opcode 0xff /3.
11291 * @param bRm The RM byte.
11292 */
11293FNIEMOP_DEF_1(iemOp_Grp5_callf_Ep, uint8_t, bRm)
11294{
11295 IEMOP_MNEMONIC(callf_Ep, "callf Ep");
11296 return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_callf);
11297}
11298
11299
11300/**
11301 * Opcode 0xff /4.
11302 * @param bRm The RM byte.
11303 */
11304FNIEMOP_DEF_1(iemOp_Grp5_jmpn_Ev, uint8_t, bRm)
11305{
11306 IEMOP_MNEMONIC(jmpn_Ev, "jmpn Ev");
11307 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
11308
11309 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
11310 {
11311 /* The new RIP is taken from a register. */
11312 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11313 switch (pVCpu->iem.s.enmEffOpSize)
11314 {
11315 case IEMMODE_16BIT:
11316 IEM_MC_BEGIN(0, 1);
11317 IEM_MC_LOCAL(uint16_t, u16Target);
11318 IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11319 IEM_MC_SET_RIP_U16(u16Target);
11320 IEM_MC_END()
11321 return VINF_SUCCESS;
11322
11323 case IEMMODE_32BIT:
11324 IEM_MC_BEGIN(0, 1);
11325 IEM_MC_LOCAL(uint32_t, u32Target);
11326 IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11327 IEM_MC_SET_RIP_U32(u32Target);
11328 IEM_MC_END()
11329 return VINF_SUCCESS;
11330
11331 case IEMMODE_64BIT:
11332 IEM_MC_BEGIN(0, 1);
11333 IEM_MC_LOCAL(uint64_t, u64Target);
11334 IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11335 IEM_MC_SET_RIP_U64(u64Target);
11336 IEM_MC_END()
11337 return VINF_SUCCESS;
11338
11339 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11340 }
11341 }
11342 else
11343 {
11344 /* The new RIP is taken from a memory location. */
11345 switch (pVCpu->iem.s.enmEffOpSize)
11346 {
11347 case IEMMODE_16BIT:
11348 IEM_MC_BEGIN(0, 2);
11349 IEM_MC_LOCAL(uint16_t, u16Target);
11350 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11351 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11352 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11353 IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11354 IEM_MC_SET_RIP_U16(u16Target);
11355 IEM_MC_END()
11356 return VINF_SUCCESS;
11357
11358 case IEMMODE_32BIT:
11359 IEM_MC_BEGIN(0, 2);
11360 IEM_MC_LOCAL(uint32_t, u32Target);
11361 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11362 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11363 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11364 IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11365 IEM_MC_SET_RIP_U32(u32Target);
11366 IEM_MC_END()
11367 return VINF_SUCCESS;
11368
11369 case IEMMODE_64BIT:
11370 IEM_MC_BEGIN(0, 2);
11371 IEM_MC_LOCAL(uint64_t, u64Target);
11372 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11373 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11374 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11375 IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11376 IEM_MC_SET_RIP_U64(u64Target);
11377 IEM_MC_END()
11378 return VINF_SUCCESS;
11379
11380 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11381 }
11382 }
11383}
11384
11385
11386/**
11387 * Opcode 0xff /5.
11388 * @param bRm The RM byte.
11389 */
11390FNIEMOP_DEF_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm)
11391{
11392 IEMOP_MNEMONIC(jmpf_Ep, "jmpf Ep");
11393 return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_FarJmp);
11394}
11395
11396
11397/**
11398 * Opcode 0xff /6.
11399 * @param bRm The RM byte.
11400 */
11401FNIEMOP_DEF_1(iemOp_Grp5_push_Ev, uint8_t, bRm)
11402{
11403 IEMOP_MNEMONIC(push_Ev, "push Ev");
11404
11405 /* Registers are handled by a common worker. */
11406 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
11407 return FNIEMOP_CALL_1(iemOpCommonPushGReg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11408
11409 /* Memory we do here. */
11410 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
11411 switch (pVCpu->iem.s.enmEffOpSize)
11412 {
11413 case IEMMODE_16BIT:
11414 IEM_MC_BEGIN(0, 2);
11415 IEM_MC_LOCAL(uint16_t, u16Src);
11416 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11417 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11418 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11419 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11420 IEM_MC_PUSH_U16(u16Src);
11421 IEM_MC_ADVANCE_RIP();
11422 IEM_MC_END();
11423 return VINF_SUCCESS;
11424
11425 case IEMMODE_32BIT:
11426 IEM_MC_BEGIN(0, 2);
11427 IEM_MC_LOCAL(uint32_t, u32Src);
11428 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11429 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11430 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11431 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11432 IEM_MC_PUSH_U32(u32Src);
11433 IEM_MC_ADVANCE_RIP();
11434 IEM_MC_END();
11435 return VINF_SUCCESS;
11436
11437 case IEMMODE_64BIT:
11438 IEM_MC_BEGIN(0, 2);
11439 IEM_MC_LOCAL(uint64_t, u64Src);
11440 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11441 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11442 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11443 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11444 IEM_MC_PUSH_U64(u64Src);
11445 IEM_MC_ADVANCE_RIP();
11446 IEM_MC_END();
11447 return VINF_SUCCESS;
11448
11449 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11450 }
11451}
11452
11453
11454/**
11455 * @opcode 0xff
11456 */
11457FNIEMOP_DEF(iemOp_Grp5)
11458{
11459 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11460 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
11461 {
11462 case 0:
11463 IEMOP_MNEMONIC(inc_Ev, "inc Ev");
11464 return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_inc);
11465 case 1:
11466 IEMOP_MNEMONIC(dec_Ev, "dec Ev");
11467 return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_dec);
11468 case 2:
11469 return FNIEMOP_CALL_1(iemOp_Grp5_calln_Ev, bRm);
11470 case 3:
11471 return FNIEMOP_CALL_1(iemOp_Grp5_callf_Ep, bRm);
11472 case 4:
11473 return FNIEMOP_CALL_1(iemOp_Grp5_jmpn_Ev, bRm);
11474 case 5:
11475 return FNIEMOP_CALL_1(iemOp_Grp5_jmpf_Ep, bRm);
11476 case 6:
11477 return FNIEMOP_CALL_1(iemOp_Grp5_push_Ev, bRm);
11478 case 7:
11479 IEMOP_MNEMONIC(grp5_ud, "grp5-ud");
11480 return IEMOP_RAISE_INVALID_OPCODE();
11481 }
11482 AssertFailedReturn(VERR_IEM_IPE_3);
11483}
11484
11485
11486
/**
 * The one byte opcode decoder dispatch table, indexed by the opcode byte.
 * Prefix bytes (lock, rep, segment overrides, operand/address size) and
 * escape bytes (0x0f, VEX/XOP/EVEX aliases) are handled through their own
 * entries in this table as well.
 */
const PFNIEMOP g_apfnOneByteMap[256] =
{
    /* 0x00 */ iemOp_add_Eb_Gb, iemOp_add_Ev_Gv, iemOp_add_Gb_Eb, iemOp_add_Gv_Ev,
    /* 0x04 */ iemOp_add_Al_Ib, iemOp_add_eAX_Iz, iemOp_push_ES, iemOp_pop_ES,
    /* 0x08 */ iemOp_or_Eb_Gb, iemOp_or_Ev_Gv, iemOp_or_Gb_Eb, iemOp_or_Gv_Ev,
    /* 0x0c */ iemOp_or_Al_Ib, iemOp_or_eAX_Iz, iemOp_push_CS, iemOp_2byteEscape,
    /* 0x10 */ iemOp_adc_Eb_Gb, iemOp_adc_Ev_Gv, iemOp_adc_Gb_Eb, iemOp_adc_Gv_Ev,
    /* 0x14 */ iemOp_adc_Al_Ib, iemOp_adc_eAX_Iz, iemOp_push_SS, iemOp_pop_SS,
    /* 0x18 */ iemOp_sbb_Eb_Gb, iemOp_sbb_Ev_Gv, iemOp_sbb_Gb_Eb, iemOp_sbb_Gv_Ev,
    /* 0x1c */ iemOp_sbb_Al_Ib, iemOp_sbb_eAX_Iz, iemOp_push_DS, iemOp_pop_DS,
    /* 0x20 */ iemOp_and_Eb_Gb, iemOp_and_Ev_Gv, iemOp_and_Gb_Eb, iemOp_and_Gv_Ev,
    /* 0x24 */ iemOp_and_Al_Ib, iemOp_and_eAX_Iz, iemOp_seg_ES, iemOp_daa,
    /* 0x28 */ iemOp_sub_Eb_Gb, iemOp_sub_Ev_Gv, iemOp_sub_Gb_Eb, iemOp_sub_Gv_Ev,
    /* 0x2c */ iemOp_sub_Al_Ib, iemOp_sub_eAX_Iz, iemOp_seg_CS, iemOp_das,
    /* 0x30 */ iemOp_xor_Eb_Gb, iemOp_xor_Ev_Gv, iemOp_xor_Gb_Eb, iemOp_xor_Gv_Ev,
    /* 0x34 */ iemOp_xor_Al_Ib, iemOp_xor_eAX_Iz, iemOp_seg_SS, iemOp_aaa,
    /* 0x38 */ iemOp_cmp_Eb_Gb, iemOp_cmp_Ev_Gv, iemOp_cmp_Gb_Eb, iemOp_cmp_Gv_Ev,
    /* 0x3c */ iemOp_cmp_Al_Ib, iemOp_cmp_eAX_Iz, iemOp_seg_DS, iemOp_aas,
    /* 0x40 */ iemOp_inc_eAX, iemOp_inc_eCX, iemOp_inc_eDX, iemOp_inc_eBX,
    /* 0x44 */ iemOp_inc_eSP, iemOp_inc_eBP, iemOp_inc_eSI, iemOp_inc_eDI,
    /* 0x48 */ iemOp_dec_eAX, iemOp_dec_eCX, iemOp_dec_eDX, iemOp_dec_eBX,
    /* 0x4c */ iemOp_dec_eSP, iemOp_dec_eBP, iemOp_dec_eSI, iemOp_dec_eDI,
    /* 0x50 */ iemOp_push_eAX, iemOp_push_eCX, iemOp_push_eDX, iemOp_push_eBX,
    /* 0x54 */ iemOp_push_eSP, iemOp_push_eBP, iemOp_push_eSI, iemOp_push_eDI,
    /* 0x58 */ iemOp_pop_eAX, iemOp_pop_eCX, iemOp_pop_eDX, iemOp_pop_eBX,
    /* 0x5c */ iemOp_pop_eSP, iemOp_pop_eBP, iemOp_pop_eSI, iemOp_pop_eDI,
    /* 0x60 */ iemOp_pusha, iemOp_popa__mvex, iemOp_bound_Gv_Ma__evex, iemOp_arpl_Ew_Gw_movsx_Gv_Ev,
    /* 0x64 */ iemOp_seg_FS, iemOp_seg_GS, iemOp_op_size, iemOp_addr_size,
    /* 0x68 */ iemOp_push_Iz, iemOp_imul_Gv_Ev_Iz, iemOp_push_Ib, iemOp_imul_Gv_Ev_Ib,
    /* 0x6c */ iemOp_insb_Yb_DX, iemOp_inswd_Yv_DX, iemOp_outsb_Yb_DX, iemOp_outswd_Yv_DX,
    /* 0x70 */ iemOp_jo_Jb, iemOp_jno_Jb, iemOp_jc_Jb, iemOp_jnc_Jb,
    /* 0x74 */ iemOp_je_Jb, iemOp_jne_Jb, iemOp_jbe_Jb, iemOp_jnbe_Jb,
    /* 0x78 */ iemOp_js_Jb, iemOp_jns_Jb, iemOp_jp_Jb, iemOp_jnp_Jb,
    /* 0x7c */ iemOp_jl_Jb, iemOp_jnl_Jb, iemOp_jle_Jb, iemOp_jnle_Jb,
    /* 0x80 */ iemOp_Grp1_Eb_Ib_80, iemOp_Grp1_Ev_Iz, iemOp_Grp1_Eb_Ib_82, iemOp_Grp1_Ev_Ib,
    /* 0x84 */ iemOp_test_Eb_Gb, iemOp_test_Ev_Gv, iemOp_xchg_Eb_Gb, iemOp_xchg_Ev_Gv,
    /* 0x88 */ iemOp_mov_Eb_Gb, iemOp_mov_Ev_Gv, iemOp_mov_Gb_Eb, iemOp_mov_Gv_Ev,
    /* 0x8c */ iemOp_mov_Ev_Sw, iemOp_lea_Gv_M, iemOp_mov_Sw_Ev, iemOp_Grp1A__xop,
    /* 0x90 */ iemOp_nop, iemOp_xchg_eCX_eAX, iemOp_xchg_eDX_eAX, iemOp_xchg_eBX_eAX,
    /* 0x94 */ iemOp_xchg_eSP_eAX, iemOp_xchg_eBP_eAX, iemOp_xchg_eSI_eAX, iemOp_xchg_eDI_eAX,
    /* 0x98 */ iemOp_cbw, iemOp_cwd, iemOp_call_Ap, iemOp_wait,
    /* 0x9c */ iemOp_pushf_Fv, iemOp_popf_Fv, iemOp_sahf, iemOp_lahf,
    /* 0xa0 */ iemOp_mov_AL_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL, iemOp_mov_Ov_rAX,
    /* 0xa4 */ iemOp_movsb_Xb_Yb, iemOp_movswd_Xv_Yv, iemOp_cmpsb_Xb_Yb, iemOp_cmpswd_Xv_Yv,
    /* 0xa8 */ iemOp_test_AL_Ib, iemOp_test_eAX_Iz, iemOp_stosb_Yb_AL, iemOp_stoswd_Yv_eAX,
    /* 0xac */ iemOp_lodsb_AL_Xb, iemOp_lodswd_eAX_Xv, iemOp_scasb_AL_Xb, iemOp_scaswd_eAX_Xv,
    /* 0xb0 */ iemOp_mov_AL_Ib, iemOp_CL_Ib, iemOp_DL_Ib, iemOp_BL_Ib,
    /* 0xb4 */ iemOp_mov_AH_Ib, iemOp_CH_Ib, iemOp_DH_Ib, iemOp_BH_Ib,
    /* 0xb8 */ iemOp_eAX_Iv, iemOp_eCX_Iv, iemOp_eDX_Iv, iemOp_eBX_Iv,
    /* 0xbc */ iemOp_eSP_Iv, iemOp_eBP_Iv, iemOp_eSI_Iv, iemOp_eDI_Iv,
    /* 0xc0 */ iemOp_Grp2_Eb_Ib, iemOp_Grp2_Ev_Ib, iemOp_retn_Iw, iemOp_retn,
    /* 0xc4 */ iemOp_les_Gv_Mp__vex2, iemOp_lds_Gv_Mp__vex3, iemOp_Grp11_Eb_Ib, iemOp_Grp11_Ev_Iz,
    /* 0xc8 */ iemOp_enter_Iw_Ib, iemOp_leave, iemOp_retf_Iw, iemOp_retf,
    /* 0xcc */ iemOp_int3, iemOp_int_Ib, iemOp_into, iemOp_iret,
    /* 0xd0 */ iemOp_Grp2_Eb_1, iemOp_Grp2_Ev_1, iemOp_Grp2_Eb_CL, iemOp_Grp2_Ev_CL,
    /* 0xd4 */ iemOp_aam_Ib, iemOp_aad_Ib, iemOp_salc, iemOp_xlat,
    /* 0xd8 */ iemOp_EscF0, iemOp_EscF1, iemOp_EscF2, iemOp_EscF3,
    /* 0xdc */ iemOp_EscF4, iemOp_EscF5, iemOp_EscF6, iemOp_EscF7,
    /* 0xe0 */ iemOp_loopne_Jb, iemOp_loope_Jb, iemOp_loop_Jb, iemOp_jecxz_Jb,
    /* 0xe4 */ iemOp_in_AL_Ib, iemOp_in_eAX_Ib, iemOp_out_Ib_AL, iemOp_out_Ib_eAX,
    /* 0xe8 */ iemOp_call_Jv, iemOp_jmp_Jv, iemOp_jmp_Ap, iemOp_jmp_Jb,
    /* 0xec */ iemOp_in_AL_DX, iemOp_eAX_DX, iemOp_out_DX_AL, iemOp_out_DX_eAX,
    /* 0xf0 */ iemOp_lock, iemOp_int1, iemOp_repne, iemOp_repe,
    /* 0xf4 */ iemOp_hlt, iemOp_cmc, iemOp_Grp3_Eb, iemOp_Grp3_Ev,
    /* 0xf8 */ iemOp_clc, iemOp_stc, iemOp_cli, iemOp_sti,
    /* 0xfc */ iemOp_cld, iemOp_std, iemOp_Grp4, iemOp_Grp5,
};
11554
11555
11556/** @} */
11557
Note: See TracBrowser for help on using the repository browser.

© 2025 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette