VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsOneByte.cpp.h@ 66124

Last change on this file since 66124 was 66124, checked in by vboxsync, 8 years ago

bs3-cpu-generated-1,IEM: More tests, fixed rAX,Iz testing bug.

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 376.8 KB
Line 
1/* $Id: IEMAllInstructionsOneByte.cpp.h 66124 2017-03-16 13:34:53Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 */
5
6/*
7 * Copyright (C) 2011-2016 Oracle Corporation
8 *
9 * This file is part of VirtualBox Open Source Edition (OSE), as
10 * available from http://www.virtualbox.org. This file is free software;
11 * you can redistribute it and/or modify it under the terms of the GNU
12 * General Public License (GPL) as published by the Free Software
13 * Foundation, in version 2 as it comes in the "COPYING" file of the
14 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
15 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
16 */
17
18
19/*******************************************************************************
20* Global Variables *
21*******************************************************************************/
/* One-byte opcode dispatch table; extern (not static) because the prefix
   handlers defined before the table recurse into it via this declaration. */
extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */
23
24/** @def og_gen General
25 * @{
26 */
27
28/** @def og_gen_arith Arithmetic
29 * @{
30 */
31/** @defgroup og_gen_arith_bin Binary numbers */
32/** @defgroup og_gen_arith_dec Decimal numbers */
33/** @} */
34
35
36
37/** @name One byte opcodes.
38 * @{
39 */
40
41/* Instruction specification format - work in progress: */
42
43/**
44 * @opcode 0x00
45 * @opmnemonic add
46 * @op1 rm:Eb
47 * @op2 reg:Gb
48 * @opmaps one
49 * @openc ModR/M
50 * @opflmodify of,sf,zf,af,pf,cf
51 * @ophints harmless ignores_op_size
52 * @opstats add_Eb_Gb
53 * @opgroup op_gen_arith_bin
54 * @optest op1=1 op2=1 -> op1=2 efl&|=nc,pe,na,nz,pl,nv
55 * @optest efl|=cf op1=1 op2=2 -> op1=3 efl&|=nc,po,na,nz,pl,nv
56 * @optest op1=254 op2=1 -> op1=255 efl&|=nc,po,na,nz,ng,nv
57 * @optest op1=128 op2=128 -> op1=0 efl&|=ov,pl,zf,na,po,cf
58 */
FNIEMOP_DEF(iemOp_add_Eb_Gb)
{
    /* ADD Eb,Gb (0x00) — defer to the common byte r/m,reg decoder with the ADD worker table. */
    IEMOP_MNEMONIC2(MR, ADD, add, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_add);
}
64
65
66/**
67 * @opcode 0x01
68 * @opgroup op_gen_arith_bin
69 * @opflmodify of,sf,zf,af,pf,cf
70 * @optest op1=1 op2=1 -> op1=2 efl&|=nc,pe,na,nz,pl,nv
71 * @optest efl|=cf op1=2 op2=2 -> op1=4 efl&|=nc,pe,na,nz,pl,nv
72 * @optest efl&~=cf op1=-1 op2=1 -> op1=0 efl&|=cf,po,af,zf,pl,nv
73 * @optest op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
74 */
FNIEMOP_DEF(iemOp_add_Ev_Gv)
{
    /* ADD Ev,Gv (0x01) — common word/dword/qword r/m,reg decoder, ADD worker table. */
    IEMOP_MNEMONIC2(MR, ADD, add, Ev, Gv, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_add);
}
80
81
82/**
83 * @opcode 0x02
84 * @opgroup op_gen_arith_bin
85 * @opflmodify of,sf,zf,af,pf,cf
86 * @opcopytests iemOp_add_Eb_Gb
87 */
FNIEMOP_DEF(iemOp_add_Gb_Eb)
{
    /* ADD Gb,Eb (0x02) — reg,r/m byte direction of ADD. */
    IEMOP_MNEMONIC2(RM, ADD, add, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_add);
}
93
94
95/**
96 * @opcode 0x03
97 * @opgroup op_gen_arith_bin
98 * @opflmodify of,sf,zf,af,pf,cf
99 * @opcopytests iemOp_add_Ev_Gv
100 */
FNIEMOP_DEF(iemOp_add_Gv_Ev)
{
    /* ADD Gv,Ev (0x03) — reg,r/m word/dword/qword direction of ADD. */
    IEMOP_MNEMONIC2(RM, ADD, add, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_add);
}
106
107
108/**
109 * @opcode 0x04
110 * @opgroup op_gen_arith_bin
111 * @opflmodify of,sf,zf,af,pf,cf
112 * @opcopytests iemOp_add_Eb_Gb
113 */
FNIEMOP_DEF(iemOp_add_Al_Ib)
{
    /* ADD AL,Ib (0x04) — fixed-register byte immediate form; op-size prefix is irrelevant. */
    IEMOP_MNEMONIC2(FIXED, ADD, add, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_add);
}
119
120
121/**
122 * @opcode 0x05
123 * @opgroup op_gen_arith_bin
124 * @opflmodify of,sf,zf,af,pf,cf
125 * @optest op1=1 op2=1 -> op1=2 efl&|=nv,pl,nz,na,pe
126 * @optest efl|=cf op1=2 op2=2 -> op1=4 efl&|=nc,pe,na,nz,pl,nv
127 * @optest efl&~=cf op1=-1 op2=1 -> op1=0 efl&|=cf,po,af,zf,pl,nv
128 * @optest op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
129 */
FNIEMOP_DEF(iemOp_add_eAX_Iz)
{
    /* ADD rAX,Iz (0x05) — immediate size tracks the effective operand size. */
    IEMOP_MNEMONIC2(FIXED, ADD, add, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_add);
}
135
136
137/**
138 * @opcode 0x06
139 * @opgroup op_stack_sreg
140 */
FNIEMOP_DEF(iemOp_push_ES)
{
    /* PUSH ES (0x06) — invalid in 64-bit mode (raises #UD via IEMOP_HLP_NO_64BIT). */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, ES, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_ES);
}
147
148
149/**
150 * @opcode 0x07
151 * @opgroup op_stack_sreg
152 */
FNIEMOP_DEF(iemOp_pop_ES)
{
    /* POP ES (0x07) — invalid in 64-bit mode; segment-register loads are
       deferred to the C implementation (descriptor checks, faults). */
    IEMOP_MNEMONIC1(FIXED, POP, pop, ES, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_ES, pVCpu->iem.s.enmEffOpSize);
}
160
161
162/**
163 * @opcode 0x08
164 * @opgroup op_gen_arith_bin
165 * @opflmodify of,sf,zf,af,pf,cf
166 * @opflundef af
167 * @opflclear of,cf
168 */
FNIEMOP_DEF(iemOp_or_Eb_Gb)
{
    /* OR Eb,Gb (0x08) — AF is architecturally undefined, so tell the verifier to ignore it. */
    IEMOP_MNEMONIC2(MR, OR, or, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_or);
}
175
176
177/**
178 * @opcode 0x09
179 * @opgroup op_gen_arith_bin
180 * @opflmodify of,sf,zf,af,pf,cf
181 * @opflundef af
182 * @opflclear of,cf
183 */
FNIEMOP_DEF(iemOp_or_Ev_Gv)
{
    /* OR Ev,Gv (0x09) — AF undefined. */
    IEMOP_MNEMONIC2(MR, OR, or, Ev, Gv, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_or);
}
190
191
192/**
193 * @opcode 0x0a
194 * @opgroup op_gen_arith_bin
195 * @opflmodify of,sf,zf,af,pf,cf
196 * @opflundef af
197 * @opflclear of,cf
198 */
FNIEMOP_DEF(iemOp_or_Gb_Eb)
{
    /* OR Gb,Eb (0x0a) — AF undefined. */
    IEMOP_MNEMONIC2(RM, OR, or, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_or);
}
205
206
207/**
208 * @opcode 0x0b
209 * @opgroup op_gen_arith_bin
210 * @opflmodify of,sf,zf,af,pf,cf
211 * @opflundef af
212 * @opflclear of,cf
213 */
FNIEMOP_DEF(iemOp_or_Gv_Ev)
{
    /* OR Gv,Ev (0x0b) — AF undefined. */
    IEMOP_MNEMONIC2(RM, OR, or, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_or);
}
220
221
222/**
223 * @opcode 0x0c
224 * @opgroup op_gen_arith_bin
225 * @opflmodify of,sf,zf,af,pf,cf
226 * @opflundef af
227 * @opflclear of,cf
228 */
FNIEMOP_DEF(iemOp_or_Al_Ib)
{
    /* OR AL,Ib (0x0c) — AF undefined; op-size prefix irrelevant. */
    IEMOP_MNEMONIC2(FIXED, OR, or, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_or);
}
235
236
237/**
238 * @opcode 0x0d
239 * @opgroup op_gen_arith_bin
240 * @opflmodify of,sf,zf,af,pf,cf
241 * @opflundef af
242 * @opflclear of,cf
243 */
FNIEMOP_DEF(iemOp_or_eAX_Iz)
{
    /* OR rAX,Iz (0x0d) — AF undefined. */
    IEMOP_MNEMONIC2(FIXED, OR, or, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_or);
}
250
251
252/**
253 * @opcode 0x0e
254 * @opgroup op_stack_sreg
255 */
FNIEMOP_DEF(iemOp_push_CS)
{
    /* PUSH CS (0x0e) — invalid in 64-bit mode; flagged potentially dangerous for the disassembler. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_CS);
}
262
263
264/**
265 * @opcode 0x0f
266 * @opmnemonic EscTwo0f
267 * @openc two0f
268 * @opdisenum OP_2B_ESC
269 * @ophints harmless
270 * @opgroup op_escapes
271 */
272FNIEMOP_DEF(iemOp_2byteEscape)
273{
274#ifdef VBOX_STRICT
275 /* Sanity check the table the first time around. */
276 static bool s_fTested = false;
277 if (RT_LIKELY(s_fTested)) { /* likely */ }
278 else
279 {
280 s_fTested = true;
281 Assert(g_apfnTwoByteMap[0xbc * 4 + 0] == iemOp_bsf_Gv_Ev);
282 Assert(g_apfnTwoByteMap[0xbc * 4 + 1] == iemOp_bsf_Gv_Ev);
283 Assert(g_apfnTwoByteMap[0xbc * 4 + 2] == iemOp_tzcnt_Gv_Ev);
284 Assert(g_apfnTwoByteMap[0xbc * 4 + 3] == iemOp_bsf_Gv_Ev);
285 }
286#endif
287
288 if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_286))
289 {
290 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
291 IEMOP_HLP_MIN_286();
292 return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
293 }
294 /* @opdone */
295
296 /*
297 * On the 8086 this is a POP CS instruction.
298 * For the time being we don't specify this this.
299 */
300 IEMOP_MNEMONIC1(FIXED, POP, pop, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_INVALID_64, IEMOPHINT_SKIP_PYTHON);
301 IEMOP_HLP_NO_64BIT();
302 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
303 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_ES, pVCpu->iem.s.enmEffOpSize);
304}
305
306/**
307 * @opcode 0x10
308 * @opgroup op_gen_arith_bin
309 * @opfltest cf
310 * @opflmodify of,sf,zf,af,pf,cf
311 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,nv,pl,nz,na,pe
312 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,nv,pl,nz,na,po
313 */
FNIEMOP_DEF(iemOp_adc_Eb_Gb)
{
    /* ADC Eb,Gb (0x10) — add with carry-in (consumes CF, per @opfltest). */
    IEMOP_MNEMONIC2(MR, ADC, adc, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_adc);
}
319
320
321/**
322 * @opcode 0x11
323 * @opgroup op_gen_arith_bin
324 * @opfltest cf
325 * @opflmodify of,sf,zf,af,pf,cf
326 */
FNIEMOP_DEF(iemOp_adc_Ev_Gv)
{
    /* ADC Ev,Gv (0x11). */
    IEMOP_MNEMONIC2(MR, ADC, adc, Ev, Gv, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_adc);
}
332
333
334/**
335 * @opcode 0x12
336 * @opgroup op_gen_arith_bin
337 * @opfltest cf
338 * @opflmodify of,sf,zf,af,pf,cf
339 */
FNIEMOP_DEF(iemOp_adc_Gb_Eb)
{
    /* ADC Gb,Eb (0x12). */
    IEMOP_MNEMONIC2(RM, ADC, adc, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_adc);
}
345
346
347/**
348 * @opcode 0x13
349 * @opgroup op_gen_arith_bin
350 * @opfltest cf
351 * @opflmodify of,sf,zf,af,pf,cf
352 */
FNIEMOP_DEF(iemOp_adc_Gv_Ev)
{
    /* ADC Gv,Ev (0x13). */
    IEMOP_MNEMONIC2(RM, ADC, adc, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_adc);
}
358
359
360/**
361 * @opcode 0x14
362 * @opgroup op_gen_arith_bin
363 * @opfltest cf
364 * @opflmodify of,sf,zf,af,pf,cf
365 */
FNIEMOP_DEF(iemOp_adc_Al_Ib)
{
    /* ADC AL,Ib (0x14). */
    IEMOP_MNEMONIC2(FIXED, ADC, adc, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_adc);
}
371
372
373/**
374 * @opcode 0x15
375 * @opgroup op_gen_arith_bin
376 * @opfltest cf
377 * @opflmodify of,sf,zf,af,pf,cf
378 */
FNIEMOP_DEF(iemOp_adc_eAX_Iz)
{
    /* ADC rAX,Iz (0x15). */
    IEMOP_MNEMONIC2(FIXED, ADC, adc, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_adc);
}
384
385
386/**
387 * @opcode 0x16
388 */
FNIEMOP_DEF(iemOp_push_SS)
{
    /* PUSH SS (0x16) — invalid in 64-bit mode. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, SS, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_SS);
}
395
396
397/**
398 * @opcode 0x17
 399 * @opgroup op_stack_sreg
402 */
FNIEMOP_DEF(iemOp_pop_SS)
{
    /* POP SS (0x17) — invalid in 64-bit mode; inhibits interrupts for one
       instruction (DISOPTYPE_INHIBIT_IRQS); deferred to the C implementation. */
    IEMOP_MNEMONIC1(FIXED, POP, pop, SS, DISOPTYPE_HARMLESS | DISOPTYPE_INHIBIT_IRQS | DISOPTYPE_INVALID_64 | DISOPTYPE_RRM_DANGEROUS , 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_SS, pVCpu->iem.s.enmEffOpSize);
}
410
411
412/**
413 * @opcode 0x18
414 * @opgroup op_gen_arith_bin
415 * @opfltest cf
416 * @opflmodify of,sf,zf,af,pf,cf
417 */
FNIEMOP_DEF(iemOp_sbb_Eb_Gb)
{
    /* SBB Eb,Gb (0x18) — subtract with borrow-in (consumes CF, per @opfltest). */
    IEMOP_MNEMONIC2(MR, SBB, sbb, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sbb);
}
423
424
425/**
426 * @opcode 0x19
427 * @opgroup op_gen_arith_bin
428 * @opfltest cf
429 * @opflmodify of,sf,zf,af,pf,cf
430 */
FNIEMOP_DEF(iemOp_sbb_Ev_Gv)
{
    /* SBB Ev,Gv (0x19). */
    IEMOP_MNEMONIC2(MR, SBB, sbb, Ev, Gv, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sbb);
}
436
437
438/**
439 * @opcode 0x1a
440 * @opgroup op_gen_arith_bin
441 * @opfltest cf
442 * @opflmodify of,sf,zf,af,pf,cf
443 */
FNIEMOP_DEF(iemOp_sbb_Gb_Eb)
{
    /* SBB Gb,Eb (0x1a). */
    IEMOP_MNEMONIC2(RM, SBB, sbb, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sbb);
}
449
450
451/**
452 * @opcode 0x1b
453 * @opgroup op_gen_arith_bin
454 * @opfltest cf
455 * @opflmodify of,sf,zf,af,pf,cf
456 */
FNIEMOP_DEF(iemOp_sbb_Gv_Ev)
{
    /* SBB Gv,Ev (0x1b). */
    IEMOP_MNEMONIC2(RM, SBB, sbb, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sbb);
}
462
463
464/**
465 * @opcode 0x1c
466 * @opgroup op_gen_arith_bin
467 * @opfltest cf
468 * @opflmodify of,sf,zf,af,pf,cf
469 */
FNIEMOP_DEF(iemOp_sbb_Al_Ib)
{
    /* SBB AL,Ib (0x1c). */
    IEMOP_MNEMONIC2(FIXED, SBB, sbb, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sbb);
}
475
476
477/**
478 * @opcode 0x1d
479 * @opgroup op_gen_arith_bin
480 * @opfltest cf
481 * @opflmodify of,sf,zf,af,pf,cf
482 */
FNIEMOP_DEF(iemOp_sbb_eAX_Iz)
{
    /* SBB rAX,Iz (0x1d). */
    IEMOP_MNEMONIC2(FIXED, SBB, sbb, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sbb);
}
488
489
490/**
491 * @opcode 0x1e
492 * @opgroup op_stack_sreg
493 */
FNIEMOP_DEF(iemOp_push_DS)
{
    /* PUSH DS (0x1e) — invalid in 64-bit mode. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, DS, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_DS);
}
500
501
502/**
503 * @opcode 0x1f
504 * @opgroup op_stack_sreg
505 */
FNIEMOP_DEF(iemOp_pop_DS)
{
    /* POP DS (0x1f) — invalid in 64-bit mode; deferred to the C implementation. */
    IEMOP_MNEMONIC1(FIXED, POP, pop, DS, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_DS, pVCpu->iem.s.enmEffOpSize);
}
513
514
515/**
516 * @opcode 0x20
517 * @opgroup op_gen_arith_bin
518 * @opflmodify of,sf,zf,af,pf,cf
519 * @opflundef af
520 * @opflclear of,cf
521 */
FNIEMOP_DEF(iemOp_and_Eb_Gb)
{
    /* AND Eb,Gb (0x20) — AF undefined, verifier told to ignore it. */
    IEMOP_MNEMONIC2(MR, AND, and, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_and);
}
528
529
530/**
531 * @opcode 0x21
532 * @opgroup op_gen_arith_bin
533 * @opflmodify of,sf,zf,af,pf,cf
534 * @opflundef af
535 * @opflclear of,cf
536 */
FNIEMOP_DEF(iemOp_and_Ev_Gv)
{
    /* AND Ev,Gv (0x21) — AF undefined. */
    IEMOP_MNEMONIC2(MR, AND, and, Ev, Gv, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_and);
}
543
544
545/**
546 * @opcode 0x22
547 * @opgroup op_gen_arith_bin
548 * @opflmodify of,sf,zf,af,pf,cf
549 * @opflundef af
550 * @opflclear of,cf
551 */
FNIEMOP_DEF(iemOp_and_Gb_Eb)
{
    /* AND Gb,Eb (0x22) — AF undefined. */
    IEMOP_MNEMONIC2(RM, AND, and, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_and);
}
558
559
560/**
561 * @opcode 0x23
562 * @opgroup op_gen_arith_bin
563 * @opflmodify of,sf,zf,af,pf,cf
564 * @opflundef af
565 * @opflclear of,cf
566 */
FNIEMOP_DEF(iemOp_and_Gv_Ev)
{
    /* AND Gv,Ev (0x23) — AF undefined. */
    IEMOP_MNEMONIC2(RM, AND, and, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_and);
}
573
574
575/**
576 * @opcode 0x24
577 * @opgroup op_gen_arith_bin
578 * @opflmodify of,sf,zf,af,pf,cf
579 * @opflundef af
580 * @opflclear of,cf
581 */
582FNIEMOP_DEF(iemOp_and_Al_Ib)
583{
584 IEMOP_MNEMONIC2(FIXED, AND, and, AL, Ib, DISOPTYPE_HARMLESS, 0);
585 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
586 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_and);
587}
588
589
590/**
591 * @opcode 0x25
592 * @opgroup op_gen_arith_bin
593 * @opflmodify of,sf,zf,af,pf,cf
594 * @opflundef af
595 * @opflclear of,cf
596 */
FNIEMOP_DEF(iemOp_and_eAX_Iz)
{
    /* AND rAX,Iz (0x25) — AF undefined. */
    IEMOP_MNEMONIC2(FIXED, AND, and, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_and);
}
603
604
605/**
606 * @opcode 0x26
607 * @opmnemonic SEG
608 * @op1 ES
609 * @opgroup op_prefix
610 * @openc prefix
611 * @opdisenum OP_SEG
612 * @ophints harmless
613 */
FNIEMOP_DEF(iemOp_seg_ES)
{
    /* ES segment-override prefix (0x26): record it, then decode the next
       opcode byte through the one-byte table. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg es");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_ES;
    pVCpu->iem.s.iEffSeg = X86_SREG_ES;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
623
624
625/**
626 * @opcode 0x27
627 * @opfltest af,cf
628 * @opflmodify of,sf,zf,af,pf,cf
629 * @opflundef of
630 */
FNIEMOP_DEF(iemOp_daa)
{
    /* DAA (0x27) — decimal adjust AL; invalid in 64-bit mode; OF undefined
       (declared to the verifier below); implemented in C. */
    IEMOP_MNEMONIC0(FIXED, DAA, daa, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0); /* express implicit AL register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_daa);
}
639
640
641/**
642 * @opcode 0x28
643 * @opgroup op_gen_arith_bin
644 * @opflmodify of,sf,zf,af,pf,cf
645 */
FNIEMOP_DEF(iemOp_sub_Eb_Gb)
{
    /* SUB Eb,Gb (0x28). */
    IEMOP_MNEMONIC2(MR, SUB, sub, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sub);
}
651
652
653/**
654 * @opcode 0x29
655 * @opgroup op_gen_arith_bin
656 * @opflmodify of,sf,zf,af,pf,cf
657 */
FNIEMOP_DEF(iemOp_sub_Ev_Gv)
{
    /* SUB Ev,Gv (0x29). */
    IEMOP_MNEMONIC2(MR, SUB, sub, Ev, Gv, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sub);
}
663
664
665/**
666 * @opcode 0x2a
667 * @opgroup op_gen_arith_bin
668 * @opflmodify of,sf,zf,af,pf,cf
669 */
FNIEMOP_DEF(iemOp_sub_Gb_Eb)
{
    /* SUB Gb,Eb (0x2a). */
    IEMOP_MNEMONIC2(RM, SUB, sub, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sub);
}
675
676
677/**
678 * @opcode 0x2b
679 * @opgroup op_gen_arith_bin
680 * @opflmodify of,sf,zf,af,pf,cf
681 */
FNIEMOP_DEF(iemOp_sub_Gv_Ev)
{
    /* SUB Gv,Ev (0x2b). */
    IEMOP_MNEMONIC2(RM, SUB, sub, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sub);
}
687
688
689/**
690 * @opcode 0x2c
691 * @opgroup op_gen_arith_bin
692 * @opflmodify of,sf,zf,af,pf,cf
693 */
FNIEMOP_DEF(iemOp_sub_Al_Ib)
{
    /* SUB AL,Ib (0x2c). */
    IEMOP_MNEMONIC2(FIXED, SUB, sub, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sub);
}
699
700
701/**
702 * @opcode 0x2d
703 * @opgroup op_gen_arith_bin
704 * @opflmodify of,sf,zf,af,pf,cf
705 */
FNIEMOP_DEF(iemOp_sub_eAX_Iz)
{
    /* SUB rAX,Iz (0x2d). */
    IEMOP_MNEMONIC2(FIXED, SUB, sub, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sub);
}
711
712
713/**
714 * @opcode 0x2e
715 * @opmnemonic SEG
716 * @op1 CS
717 * @opgroup op_prefix
718 * @openc prefix
719 * @opdisenum OP_SEG
720 * @ophints harmless
721 */
FNIEMOP_DEF(iemOp_seg_CS)
{
    /* CS segment-override prefix (0x2e): record it, then decode the next
       opcode byte through the one-byte table. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg cs");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_CS;
    pVCpu->iem.s.iEffSeg = X86_SREG_CS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
731
732
733/**
734 * @opcode 0x2f
735 * @opfltest af,cf
736 * @opflmodify of,sf,zf,af,pf,cf
737 * @opflundef of
738 */
FNIEMOP_DEF(iemOp_das)
{
    /* DAS (0x2f) — decimal adjust AL after subtraction; invalid in 64-bit
       mode; OF undefined; implemented in C. */
    IEMOP_MNEMONIC0(FIXED, DAS, das, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0); /* express implicit AL register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_das);
}
747
748
749/**
750 * @opcode 0x30
751 * @opgroup op_gen_arith_bin
752 * @opflmodify of,sf,zf,af,pf,cf
753 * @opflundef af
754 * @opflclear of,cf
755 */
FNIEMOP_DEF(iemOp_xor_Eb_Gb)
{
    /* XOR Eb,Gb (0x30) — AF undefined. */
    IEMOP_MNEMONIC2(MR, XOR, xor, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_xor);
}
762
763
764/**
765 * @opcode 0x31
766 * @opgroup op_gen_arith_bin
767 * @opflmodify of,sf,zf,af,pf,cf
768 * @opflundef af
769 * @opflclear of,cf
770 */
FNIEMOP_DEF(iemOp_xor_Ev_Gv)
{
    /* XOR Ev,Gv (0x31) — AF undefined. */
    IEMOP_MNEMONIC2(MR, XOR, xor, Ev, Gv, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_xor);
}
777
778
779/**
780 * @opcode 0x32
781 * @opgroup op_gen_arith_bin
782 * @opflmodify of,sf,zf,af,pf,cf
783 * @opflundef af
784 * @opflclear of,cf
785 */
FNIEMOP_DEF(iemOp_xor_Gb_Eb)
{
    /* XOR Gb,Eb (0x32) — AF undefined. */
    IEMOP_MNEMONIC2(RM, XOR, xor, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_xor);
}
792
793
794/**
795 * @opcode 0x33
796 * @opgroup op_gen_arith_bin
797 * @opflmodify of,sf,zf,af,pf,cf
798 * @opflundef af
799 * @opflclear of,cf
800 */
FNIEMOP_DEF(iemOp_xor_Gv_Ev)
{
    /* XOR Gv,Ev (0x33) — AF undefined. */
    IEMOP_MNEMONIC2(RM, XOR, xor, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_xor);
}
807
808
809/**
810 * @opcode 0x34
811 * @opgroup op_gen_arith_bin
812 * @opflmodify of,sf,zf,af,pf,cf
813 * @opflundef af
814 * @opflclear of,cf
815 */
FNIEMOP_DEF(iemOp_xor_Al_Ib)
{
    /* XOR AL,Ib (0x34) — AF undefined. */
    IEMOP_MNEMONIC2(FIXED, XOR, xor, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_xor);
}
822
823
824/**
825 * @opcode 0x35
826 * @opgroup op_gen_arith_bin
827 * @opflmodify of,sf,zf,af,pf,cf
828 * @opflundef af
829 * @opflclear of,cf
830 */
831FNIEMOP_DEF(iemOp_xor_eAX_Iz)
832{
833 IEMOP_MNEMONIC2(FIXED, XOR, xor, rAX, Iz, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
834 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
835 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_xor);
836}
837
838
839/**
840 * @opcode 0x36
841 */
FNIEMOP_DEF(iemOp_seg_SS)
{
    /* SS segment-override prefix (0x36): record it, then decode the next
       opcode byte through the one-byte table. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ss");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_SS;
    pVCpu->iem.s.iEffSeg = X86_SREG_SS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
851
852
853/**
854 * @opcode 0x37
855 */
FNIEMOP_STUB(iemOp_aaa); /* NOTE(review): AAA not implemented yet — FNIEMOP_STUB supplies a placeholder body. */
857
858
859/**
860 * @opcode 0x38
861 */
FNIEMOP_DEF(iemOp_cmp_Eb_Gb)
{
    /* CMP Eb,Gb (0x38) — still on the old IEMOP_MNEMONIC macro, unlike the
       0x00-0x35 siblings which use IEMOP_MNEMONIC2. */
    IEMOP_MNEMONIC(cmp_Eb_Gb, "cmp Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_cmp);
}
867
868
869/**
870 * @opcode 0x39
871 */
FNIEMOP_DEF(iemOp_cmp_Ev_Gv)
{
    /* CMP Ev,Gv (0x39). */
    IEMOP_MNEMONIC(cmp_Ev_Gv, "cmp Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_cmp);
}
877
878
879/**
880 * @opcode 0x3a
881 */
FNIEMOP_DEF(iemOp_cmp_Gb_Eb)
{
    /* CMP Gb,Eb (0x3a). */
    IEMOP_MNEMONIC(cmp_Gb_Eb, "cmp Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_cmp);
}
887
888
889/**
890 * @opcode 0x3b
891 */
FNIEMOP_DEF(iemOp_cmp_Gv_Ev)
{
    /* CMP Gv,Ev (0x3b). */
    IEMOP_MNEMONIC(cmp_Gv_Ev, "cmp Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_cmp);
}
897
898
899/**
900 * @opcode 0x3c
901 */
FNIEMOP_DEF(iemOp_cmp_Al_Ib)
{
    /* CMP AL,Ib (0x3c). */
    IEMOP_MNEMONIC(cmp_al_Ib, "cmp al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_cmp);
}
907
908
909/**
910 * @opcode 0x3d
911 */
FNIEMOP_DEF(iemOp_cmp_eAX_Iz)
{
    /* CMP rAX,Iz (0x3d). */
    IEMOP_MNEMONIC(cmp_rAX_Iz, "cmp rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_cmp);
}
917
918
919/**
920 * @opcode 0x3e
921 */
FNIEMOP_DEF(iemOp_seg_DS)
{
    /* DS segment-override prefix (0x3e): record it, then decode the next
       opcode byte through the one-byte table. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ds");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_DS;
    pVCpu->iem.s.iEffSeg = X86_SREG_DS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
931
932
933/**
934 * @opcode 0x3f
935 */
FNIEMOP_STUB(iemOp_aas); /* NOTE(review): AAS not implemented yet — FNIEMOP_STUB supplies a placeholder body. */
937
938/**
939 * Common 'inc/dec/not/neg register' helper.
940 */
/**
 * Common 'inc/dec/not/neg register' helper.
 *
 * Dispatches on the effective operand size and applies the unary worker from
 * pImpl to general register iReg, with EFLAGS passed by reference to the
 * worker.
 *
 * @param   pImpl   Size-indexed table of unary operation workers.
 * @param   iReg    The general purpose register number (X86_GREG_XXX).
 */
FNIEMOP_DEF_2(iemOpCommonUnaryGReg, PCIEMOPUNARYSIZES, pImpl, uint8_t, iReg)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint16_t *, pu16Dst, 0);
            IEM_MC_ARG(uint32_t *, pEFlags, 1);
            IEM_MC_REF_GREG_U16(pu16Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
            IEM_MC_ARG(uint32_t *, pEFlags, 1);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
            /* 32-bit writes zero the upper half of the 64-bit register. */
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint64_t *, pu64Dst, 0);
            IEM_MC_ARG(uint32_t *, pEFlags, 1);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
    }
    /* Not reached for any valid enmEffOpSize; quiets missing-return warnings. */
    return VINF_SUCCESS;
}
982
983
984/**
985 * @opcode 0x40
986 */
FNIEMOP_DEF(iemOp_inc_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        /* Plain REX (no R/X/B/W bits): record and decode the next byte. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Otherwise: INC eAX (0x40) via the common unary-register helper. */
    IEMOP_MNEMONIC(inc_eAX, "inc eAX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xAX);
}
1004
1005
1006/**
1007 * @opcode 0x41
1008 */
FNIEMOP_DEF(iemOp_inc_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        /* REX.B: extends the ModR/M r/m / opcode-reg field by bit 3. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.b");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexB = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eCX, "inc eCX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xCX);
}
1027
1028
1029/**
1030 * @opcode 0x42
1031 */
FNIEMOP_DEF(iemOp_inc_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        /* REX.X: extends the SIB index field by bit 3. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.x");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eDX, "inc eDX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDX);
}
1050
1051
1052
1053/**
1054 * @opcode 0x43
1055 */
FNIEMOP_DEF(iemOp_inc_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        /* REX.BX: both base/r-m and SIB index extensions. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexB = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eBX, "inc eBX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBX);
}
1075
1076
1077/**
1078 * @opcode 0x44
1079 */
FNIEMOP_DEF(iemOp_inc_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        /* REX.R: extends the ModR/M reg field by bit 3. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.r");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R;
        pVCpu->iem.s.uRexReg = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eSP, "inc eSP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSP);
}
1098
1099
1100/**
1101 * @opcode 0x45
1102 */
FNIEMOP_DEF(iemOp_inc_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        /* REX.RB. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rb");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexB = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eBP, "inc eBP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBP);
}
1122
1123
1124/**
1125 * @opcode 0x46
1126 */
FNIEMOP_DEF(iemOp_inc_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        /* REX.RX. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eSI, "inc eSI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSI);
}
1146
1147
1148/**
1149 * @opcode 0x47
1150 */
FNIEMOP_DEF(iemOp_inc_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        /* REX.RXB. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexB = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eDI, "inc eDI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDI);
}
1171
1172
1173/**
1174 * @opcode 0x48
1175 */
FNIEMOP_DEF(iemOp_dec_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        /* REX.W: 64-bit operand size — recalculate the effective op size. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.w");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_SIZE_REX_W;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eAX, "dec eAX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xAX);
}
1194
1195
1196/**
1197 * @opcode 0x49
1198 */
FNIEMOP_DEF(iemOp_dec_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        /* REX.BW. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexB = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eCX, "dec eCX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xCX);
}
1218
1219
1220/**
1221 * @opcode 0x4a
1222 */
FNIEMOP_DEF(iemOp_dec_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.xw");
        /* REX.XW: extends the SIB index field (X) and selects 64-bit operand
           size (W). */
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        /* Prefix consumed - fetch and dispatch the actual opcode byte. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode 0x4a is the one-byte DEC eDX instruction. */
    IEMOP_MNEMONIC(dec_eDX, "dec eDX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDX);
}
1242
1243
1244/**
1245 * @opcode 0x4b
1246 */
FNIEMOP_DEF(iemOp_dec_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bxw");
        /* REX.BXW: extends the r/m / base field (B) and the SIB index field (X),
           and selects 64-bit operand size (W). */
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexB = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        /* Prefix consumed - fetch and dispatch the actual opcode byte. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode 0x4b is the one-byte DEC eBX instruction. */
    IEMOP_MNEMONIC(dec_eBX, "dec eBX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBX);
}
1267
1268
1269/**
1270 * @opcode 0x4c
1271 */
FNIEMOP_DEF(iemOp_dec_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rw");
        /* REX.RW: extends the ModR/M reg field (R) and selects 64-bit operand
           size (W). */
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        /* Prefix consumed - fetch and dispatch the actual opcode byte. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode 0x4c is the one-byte DEC eSP instruction. */
    IEMOP_MNEMONIC(dec_eSP, "dec eSP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSP);
}
1291
1292
1293/**
1294 * @opcode 0x4d
1295 */
FNIEMOP_DEF(iemOp_dec_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbw");
        /* REX.RBW: extends the ModR/M reg field (R) and the r/m / base field (B),
           and selects 64-bit operand size (W). */
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexB = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        /* Prefix consumed - fetch and dispatch the actual opcode byte. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode 0x4d is the one-byte DEC eBP instruction. */
    IEMOP_MNEMONIC(dec_eBP, "dec eBP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBP);
}
1316
1317
1318/**
1319 * @opcode 0x4e
1320 */
FNIEMOP_DEF(iemOp_dec_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rxw");
        /* REX.RXW: extends the ModR/M reg field (R) and the SIB index field (X),
           and selects 64-bit operand size (W). */
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        /* Prefix consumed - fetch and dispatch the actual opcode byte. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode 0x4e is the one-byte DEC eSI instruction. */
    IEMOP_MNEMONIC(dec_eSI, "dec eSI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSI);
}
1341
1342
1343/**
1344 * @opcode 0x4f
1345 */
FNIEMOP_DEF(iemOp_dec_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbxw");
        /* REX.RBXW: all three register-field extensions (R, B, X) plus 64-bit
           operand size (W). */
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexB = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        /* Prefix consumed - fetch and dispatch the actual opcode byte. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode 0x4f is the one-byte DEC eDI instruction. */
    IEMOP_MNEMONIC(dec_eDI, "dec eDI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDI);
}
1367
1368
1369/**
1370 * Common 'push register' helper.
1371 */
/*
 * Common worker for the 0x50..0x57 PUSH-register opcodes.
 *
 * @param   iReg    The low three bits of the register index (X86_GREG_xAX..xDI);
 *                  extended with REX.B in 64-bit mode below.
 *
 * In 64-bit mode PUSH defaults to a 64-bit operand and the 0x66 prefix selects
 * 16-bit; there is no 32-bit push in 64-bit mode, which is why enmEffOpSize is
 * recomputed here rather than taken as-is.
 */
FNIEMOP_DEF_1(iemOpCommonPushGReg, uint8_t, iReg)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        iReg |= pVCpu->iem.s.uRexB;
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    /* No default case: enmEffOpSize can only be one of the three modes here. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Value);
            IEM_MC_FETCH_GREG_U16(u16Value, iReg);
            IEM_MC_PUSH_U16(u16Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U32(u32Value, iReg);
            IEM_MC_PUSH_U32(u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U64(u64Value, iReg);
            IEM_MC_PUSH_U64(u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
1414
1415
1416/**
1417 * @opcode 0x50
1418 */
FNIEMOP_DEF(iemOp_push_eAX)
{
    IEMOP_MNEMONIC(push_rAX, "push rAX");
    /* REX.B extension (rAX -> r8) is applied by the common worker. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xAX);
}
1424
1425
1426/**
1427 * @opcode 0x51
1428 */
FNIEMOP_DEF(iemOp_push_eCX)
{
    IEMOP_MNEMONIC(push_rCX, "push rCX");
    /* REX.B extension (rCX -> r9) is applied by the common worker. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xCX);
}
1434
1435
1436/**
1437 * @opcode 0x52
1438 */
FNIEMOP_DEF(iemOp_push_eDX)
{
    IEMOP_MNEMONIC(push_rDX, "push rDX");
    /* REX.B extension (rDX -> r10) is applied by the common worker. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDX);
}
1444
1445
1446/**
1447 * @opcode 0x53
1448 */
FNIEMOP_DEF(iemOp_push_eBX)
{
    IEMOP_MNEMONIC(push_rBX, "push rBX");
    /* REX.B extension (rBX -> r11) is applied by the common worker. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBX);
}
1454
1455
1456/**
1457 * @opcode 0x54
1458 */
FNIEMOP_DEF(iemOp_push_eSP)
{
    IEMOP_MNEMONIC(push_rSP, "push rSP");
    /* The 8086/8088 pushes the *decremented* SP value (SP-2), unlike the 286
       and later CPUs which push the pre-decrement value. */
    if (IEM_GET_TARGET_CPU(pVCpu) == IEMTARGETCPU_8086)
    {
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint16_t, u16Value);
        IEM_MC_FETCH_GREG_U16(u16Value, X86_GREG_xSP);
        IEM_MC_SUB_LOCAL_U16(u16Value, 2);
        IEM_MC_PUSH_U16(u16Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        /* NOTE(review): no explicit return after IEM_MC_END() here - presumably
           one of the IEM_MC_* macros expands to a return in this interpreter
           build, otherwise the common worker below would push a second time;
           confirm against IEMMc.h. */
    }
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSP);
}
1474
1475
1476/**
1477 * @opcode 0x55
1478 */
FNIEMOP_DEF(iemOp_push_eBP)
{
    IEMOP_MNEMONIC(push_rBP, "push rBP");
    /* REX.B extension (rBP -> r13) is applied by the common worker. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBP);
}
1484
1485
1486/**
1487 * @opcode 0x56
1488 */
FNIEMOP_DEF(iemOp_push_eSI)
{
    IEMOP_MNEMONIC(push_rSI, "push rSI");
    /* REX.B extension (rSI -> r14) is applied by the common worker. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSI);
}
1494
1495
1496/**
1497 * @opcode 0x57
1498 */
FNIEMOP_DEF(iemOp_push_eDI)
{
    IEMOP_MNEMONIC(push_rDI, "push rDI");
    /* REX.B extension (rDI -> r15) is applied by the common worker. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDI);
}
1504
1505
1506/**
1507 * Common 'pop register' helper.
1508 */
/*
 * Common worker for the 0x58..0x5f POP-register opcodes.
 *
 * @param   iReg    The low three bits of the register index (X86_GREG_xAX..xDI);
 *                  extended with REX.B in 64-bit mode below.
 *
 * Like PUSH, POP defaults to a 64-bit operand in 64-bit mode (0x66 gives
 * 16-bit, no 32-bit form).  POP rSP has its own decoder (opcode 0x5c) and does
 * not come through here unless REX.B redirects it to r12.
 */
FNIEMOP_DEF_1(iemOpCommonPopGReg, uint8_t, iReg)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        iReg |= pVCpu->iem.s.uRexB;
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    /* No default case: enmEffOpSize can only be one of the three modes here. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t *, pu16Dst);
            IEM_MC_REF_GREG_U16(pu16Dst, iReg);
            IEM_MC_POP_U16(pu16Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t *, pu32Dst);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            IEM_MC_POP_U32(pu32Dst);
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /** @todo testcase*/
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t *, pu64Dst);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_POP_U64(pu64Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
1552
1553
1554/**
1555 * @opcode 0x58
1556 */
FNIEMOP_DEF(iemOp_pop_eAX)
{
    IEMOP_MNEMONIC(pop_rAX, "pop rAX");
    /* REX.B extension (rAX -> r8) is applied by the common worker. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xAX);
}
1562
1563
1564/**
1565 * @opcode 0x59
1566 */
FNIEMOP_DEF(iemOp_pop_eCX)
{
    IEMOP_MNEMONIC(pop_rCX, "pop rCX");
    /* REX.B extension (rCX -> r9) is applied by the common worker. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xCX);
}
1572
1573
1574/**
1575 * @opcode 0x5a
1576 */
FNIEMOP_DEF(iemOp_pop_eDX)
{
    IEMOP_MNEMONIC(pop_rDX, "pop rDX");
    /* REX.B extension (rDX -> r10) is applied by the common worker. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDX);
}
1582
1583
1584/**
1585 * @opcode 0x5b
1586 */
FNIEMOP_DEF(iemOp_pop_eBX)
{
    IEMOP_MNEMONIC(pop_rBX, "pop rBX");
    /* REX.B extension (rBX -> r11) is applied by the common worker. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBX);
}
1592
1593
1594/**
1595 * @opcode 0x5c
1596 */
FNIEMOP_DEF(iemOp_pop_eSP)
{
    IEMOP_MNEMONIC(pop_rSP, "pop rSP");
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        /* With REX.B this is POP r12, which has no special SP semantics and can
           use the common worker. */
        if (pVCpu->iem.s.uRexB)
            return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSP);
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
                           DISOPTYPE_HARMLESS | DISOPTYPE_DEFAULT_64_OP_SIZE | DISOPTYPE_REXB_EXTENDS_OPREG);
    /** @todo add testcase for this instruction. */
    /* POP SP is special: the value read from the stack replaces rSP, so it is
       popped into a local first and then stored, instead of popping straight
       into the register by reference as the common worker does. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Dst);
            IEM_MC_POP_U16(&u16Dst); /** @todo not correct MC, fix later. */
            IEM_MC_STORE_GREG_U16(X86_GREG_xSP, u16Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Dst);
            IEM_MC_POP_U32(&u32Dst);
            IEM_MC_STORE_GREG_U32(X86_GREG_xSP, u32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Dst);
            IEM_MC_POP_U64(&u64Dst);
            IEM_MC_STORE_GREG_U64(X86_GREG_xSP, u64Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
1643
1644
1645/**
1646 * @opcode 0x5d
1647 */
FNIEMOP_DEF(iemOp_pop_eBP)
{
    IEMOP_MNEMONIC(pop_rBP, "pop rBP");
    /* REX.B extension (rBP -> r13) is applied by the common worker. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBP);
}
1653
1654
1655/**
1656 * @opcode 0x5e
1657 */
FNIEMOP_DEF(iemOp_pop_eSI)
{
    IEMOP_MNEMONIC(pop_rSI, "pop rSI");
    /* REX.B extension (rSI -> r14) is applied by the common worker. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSI);
}
1663
1664
1665/**
1666 * @opcode 0x5f
1667 */
FNIEMOP_DEF(iemOp_pop_eDI)
{
    IEMOP_MNEMONIC(pop_rDI, "pop rDI");
    /* REX.B extension (rDI -> r15) is applied by the common worker. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDI);
}
1673
1674
1675/**
1676 * @opcode 0x60
1677 */
FNIEMOP_DEF(iemOp_pusha)
{
    IEMOP_MNEMONIC(pusha, "pusha");
    IEMOP_HLP_MIN_186();        /* PUSHA was introduced with the 80186. */
    IEMOP_HLP_NO_64BIT();       /* 0x60 is invalid in 64-bit mode. */
    /* Defer to the C implementation matching the effective operand size;
       only 16 and 32-bit are possible after the 64-bit check above. */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_16);
    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_32);
}
1688
1689
1690/**
1691 * @opcode 0x61
1692 */
FNIEMOP_DEF(iemOp_popa__mvex)
{
    /* Outside 64-bit mode 0x61 is POPA; in 64-bit mode it would be the MVEX
       prefix (Knights Corner), which is not implemented here. */
    if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
    {
        IEMOP_MNEMONIC(popa, "popa");
        IEMOP_HLP_MIN_186();        /* POPA was introduced with the 80186. */
        IEMOP_HLP_NO_64BIT();
        if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
            return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_16);
        Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_32);
    }
    IEMOP_MNEMONIC(mvex, "mvex");
    Log(("mvex prefix is not supported!\n"));
    return IEMOP_RAISE_INVALID_OPCODE();
}
1709
1710
1711/**
1712 * @opcode 0x62
1713 * @opmnemonic bound
1714 * @op1 Gv
1715 * @op2 Ma
1716 * @opmincpu 80186
1717 * @ophints harmless invalid_64
1718 */
/* Stub: BOUND (non-64-bit) / EVEX prefix (64-bit) is not implemented yet. */
FNIEMOP_STUB(iemOp_bound_Gv_Ma__evex);
// IEMOP_HLP_MIN_186();
1721
1722
1723/** Opcode 0x63 - non-64-bit modes. */
FNIEMOP_DEF(iemOp_arpl_Ew_Gw)
{
    IEMOP_MNEMONIC(arpl_Ew_Gw, "arpl Ew,Gw");
    IEMOP_HLP_MIN_286();                /* ARPL requires a 286. */
    IEMOP_HLP_NO_REAL_OR_V86_MODE();    /* Protected mode only. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register */
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint16_t *, pu16Dst, 0);
        IEM_MC_ARG(uint16_t, u16Src, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        /* Note: no REX extension of the fields - ARPL only exists outside
           64-bit mode (0x63 is MOVSXD there). */
        IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* Memory */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *, pu16Dst, 0);
        IEM_MC_ARG(uint16_t, u16Src, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        /* The destination is mapped read-write since ARPL may adjust the RPL
           bits in place. */
        IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;

}
1772
1773
1774/**
1775 * @opcode 0x63
1776 *
1777 * @note This is a weird one. It works like a regular move instruction if
1778 * REX.W isn't set, at least according to AMD docs (rev 3.15, 2009-11).
1779 * @todo This definitely needs a testcase to verify the odd cases. */
FNIEMOP_DEF(iemOp_movsxd_Gv_Ev)
{
    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT); /* Caller branched already . */

    IEMOP_MNEMONIC(movsxd_Gv_Ev, "movsxd Gv,Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register to register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, u64Value);
        /* Sign-extend the 32-bit source into the 64-bit destination. */
        IEM_MC_FETCH_GREG_U32_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        /* Note: GCPtrEffDst is actually the *source* address here; the name
           just follows the usual ModR/M helper convention. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, u64Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U32_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1817
1818
1819/**
1820 * @opcode 0x64
1821 * @opmnemonic segfs
1822 * @opmincpu 80386
1823 * @opgroup op_prefixes
1824 */
FNIEMOP_DEF(iemOp_seg_FS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg fs");
    IEMOP_HLP_MIN_386();    /* FS was introduced with the 80386. */

    /* Record the prefix and make FS the effective segment, then continue
       decoding with the next opcode byte. */
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_FS;
    pVCpu->iem.s.iEffSeg = X86_SREG_FS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1836
1837
1838/**
1839 * @opcode 0x65
1840 * @opmnemonic seggs
1841 * @opmincpu 80386
1842 * @opgroup op_prefixes
1843 */
FNIEMOP_DEF(iemOp_seg_GS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg gs");
    IEMOP_HLP_MIN_386();    /* GS was introduced with the 80386. */

    /* Record the prefix and make GS the effective segment, then continue
       decoding with the next opcode byte. */
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_GS;
    pVCpu->iem.s.iEffSeg = X86_SREG_GS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1855
1856
1857/**
1858 * @opcode 0x66
1859 * @opmnemonic opsize
1860 * @openc prefix
1861 * @opmincpu 80386
1862 * @ophints harmless
1863 * @opgroup op_prefixes
1864 */
FNIEMOP_DEF(iemOp_op_size)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("op size");
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_OP;
    iemRecalEffOpSize(pVCpu);

    /* For the 4 entry opcode tables, the operand size prefix doesn't count
       when REPZ or REPNZ are present (idxPrefix is only advanced to the 0x66
       slot if no other prefix has claimed it yet). */
    if (pVCpu->iem.s.idxPrefix == 0)
        pVCpu->iem.s.idxPrefix = 1;

    /* Prefix consumed - fetch and dispatch the actual opcode byte. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1881
1882
1883/**
1884 * @opcode 0x67
1885 * @opmnemonic addrsize
1886 * @openc prefix
1887 * @opmincpu 80386
1888 * @ophints harmless
1889 * @opgroup op_prefixes
1890 */
FNIEMOP_DEF(iemOp_addr_size)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("addr size");
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_ADDR;
    /* Toggle the address size relative to the default; in 64-bit mode the
       prefix selects 32-bit addressing (16-bit is not reachable). */
    switch (pVCpu->iem.s.enmDefAddrMode)
    {
        case IEMMODE_16BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
        case IEMMODE_32BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_16BIT; break;
        case IEMMODE_64BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
        default: AssertFailed();
    }

    /* Prefix consumed - fetch and dispatch the actual opcode byte. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1908
1909
1910/**
1911 * @opcode 0x68
1912 */
FNIEMOP_DEF(iemOp_push_Iz)
{
    IEMOP_MNEMONIC(push_Iz, "push Iz");
    IEMOP_HLP_MIN_186();                /* PUSH imm was introduced with the 80186. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* 64-bit default operand size in long mode. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U16(u16Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U32(u32Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            /* Iz is at most 32 bits; sign-extend it to 64 bits for the push. */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U64(u64Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
1956
1957
1958/**
1959 * @opcode 0x69
1960 */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Iz)
{
    IEMOP_MNEMONIC(imul_Gv_Ev_Iz, "imul Gv,Ev,Iz"); /* Gv = Ev * Iz; */
    IEMOP_HLP_MIN_186();    /* Three-operand IMUL was introduced with the 80186. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* SF/ZF/AF/PF are architecturally undefined after IMUL. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    /* For every operand size: compute the product into a local temporary via
       the assembly worker, then store it to the Gv destination register.
       In the memory forms the immediate is fetched *after* the effective
       address calculation, since Iz follows the ModR/M bytes; the immediate
       size (2/4 bytes) is passed to IEM_MC_CALC_RM_EFF_ADDR so RIP-relative
       addressing is computed correctly. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);

                IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);

                IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                /* Iz stays 32 bits in 64-bit mode and is sign-extended. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);

                IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }
    }
    AssertFailedReturn(VERR_IEM_IPE_9);
}
2119
2120
2121/**
2122 * @opcode 0x6a
2123 */
FNIEMOP_DEF(iemOp_push_Ib)
{
    IEMOP_MNEMONIC(push_Ib, "push Ib");
    IEMOP_HLP_MIN_186();                /* PUSH imm8 was introduced with the 80186. */
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* 64-bit default operand size in long mode. */

    /* The signed 8-bit immediate is sign-extended to the operand size by the
       implicit int8_t -> wider integer conversion in each push below. */
    IEM_MC_BEGIN(0,0);
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_PUSH_U16(i8Imm);
            break;
        case IEMMODE_32BIT:
            IEM_MC_PUSH_U32(i8Imm);
            break;
        case IEMMODE_64BIT:
            IEM_MC_PUSH_U64(i8Imm);
            break;
    }
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2149
2150
2151/**
2152 * @opcode 0x6b
2153 */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Ib)
{
    IEMOP_MNEMONIC(imul_Gv_Ev_Ib, "imul Gv,Ev,Ib"); /* Gv = Ev * Ib (sign-extended); */
    IEMOP_HLP_MIN_186();    /* Three-operand IMUL was introduced with the 80186. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* SF/ZF/AF/PF are architecturally undefined after IMUL. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    /* Same structure as the Iz form (opcode 0x69), except the immediate is a
       single byte that gets sign-extended to the operand size; hence the
       cbImm argument of 1 to IEM_MC_CALC_RM_EFF_ADDR in the memory forms. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);

                IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_S8_SX_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);

                IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_S8_SX_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);

                IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S8_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
    }
    AssertFailedReturn(VERR_IEM_IPE_8);
}
2306
2307
2308/**
2309 * @opcode 0x6c
2310 */
FNIEMOP_DEF(iemOp_insb_Yb_DX)
{
    IEMOP_HLP_MIN_186();    /* INS was introduced with the 80186. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* REPNZ behaves like REPZ for INS, so both prefixes select the rep worker.
       The 'false' argument passed to the C implementations selects the
       non-IOBP variant here (see the iemCImpl_ins_* definitions). */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_insb_Yb_DX, "rep ins Yb,DX");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr16, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr32, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(ins_Yb_DX, "ins Yb,DX");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr16, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr32, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
2338
2339
/**
 * @opcode 0x6d
 */
FNIEMOP_DEF(iemOp_inswd_Yv_DX)
{
    /* INS/INSW/INSD - input word/dword from port DX to ES:[rDI].  Selects the
       C implementation by operand size, then by address size.  Note that
       64-bit operand size maps to the op32 workers: port I/O is at most
       32 bits wide, so REX.W behaves like a 32-bit operand here. */
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC(rep_ins_Yv_DX, "rep ins Yv,DX");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reached - every inner case returns */
            case IEMMODE_64BIT:
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reached */
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(ins_Yv_DX, "ins Yv,DX");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reached */
            case IEMMODE_64BIT:
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reached */
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
2402
2403
/**
 * @opcode 0x6e
 */
FNIEMOP_DEF(iemOp_outsb_Yb_DX)
{
    /* OUTS/OUTSB - output byte from seg:[rSI] to port DX.  Unlike INS the
       source segment is overridable, so iEffSeg is passed to the worker.
       The trailing boolean presumably means "I/O permission already
       checked" — TODO confirm against the iemCImpl_outs_op8_addr* workers. */
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        /* REPNE is treated the same as REP. */
        IEMOP_MNEMONIC(rep_outsb_DX_Yb, "rep outs DX,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(outs_DX_Yb, "outs DX,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
2434
2435
/**
 * @opcode 0x6f
 */
FNIEMOP_DEF(iemOp_outswd_Yv_DX)
{
    /* OUTS/OUTSW/OUTSD - output word/dword from seg:[rSI] to port DX.
       Dispatches by operand size, then address size.  64-bit operand size
       maps to the op32 workers (port I/O tops out at 32 bits). */
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC(rep_outs_DX_Yv, "rep outs DX,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reached - every inner case returns */
            case IEMMODE_64BIT:
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reached */
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(outs_DX_Yv, "outs DX,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reached */
            case IEMMODE_64BIT:
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reached */
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
2498
2499
2500/**
2501 * @opcode 0x70
2502 */
2503FNIEMOP_DEF(iemOp_jo_Jb)
2504{
2505 IEMOP_MNEMONIC(jo_Jb, "jo Jb");
2506 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2507 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2508 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2509
2510 IEM_MC_BEGIN(0, 0);
2511 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
2512 IEM_MC_REL_JMP_S8(i8Imm);
2513 } IEM_MC_ELSE() {
2514 IEM_MC_ADVANCE_RIP();
2515 } IEM_MC_ENDIF();
2516 IEM_MC_END();
2517 return VINF_SUCCESS;
2518}
2519
2520
2521/**
2522 * @opcode 0x71
2523 */
2524FNIEMOP_DEF(iemOp_jno_Jb)
2525{
2526 IEMOP_MNEMONIC(jno_Jb, "jno Jb");
2527 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2528 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2529 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2530
2531 IEM_MC_BEGIN(0, 0);
2532 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
2533 IEM_MC_ADVANCE_RIP();
2534 } IEM_MC_ELSE() {
2535 IEM_MC_REL_JMP_S8(i8Imm);
2536 } IEM_MC_ENDIF();
2537 IEM_MC_END();
2538 return VINF_SUCCESS;
2539}
2540
2541/**
2542 * @opcode 0x72
2543 */
2544FNIEMOP_DEF(iemOp_jc_Jb)
2545{
2546 IEMOP_MNEMONIC(jc_Jb, "jc/jnae Jb");
2547 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2548 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2549 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2550
2551 IEM_MC_BEGIN(0, 0);
2552 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
2553 IEM_MC_REL_JMP_S8(i8Imm);
2554 } IEM_MC_ELSE() {
2555 IEM_MC_ADVANCE_RIP();
2556 } IEM_MC_ENDIF();
2557 IEM_MC_END();
2558 return VINF_SUCCESS;
2559}
2560
2561
2562/**
2563 * @opcode 0x73
2564 */
2565FNIEMOP_DEF(iemOp_jnc_Jb)
2566{
2567 IEMOP_MNEMONIC(jnc_Jb, "jnc/jnb Jb");
2568 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2569 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2570 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2571
2572 IEM_MC_BEGIN(0, 0);
2573 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
2574 IEM_MC_ADVANCE_RIP();
2575 } IEM_MC_ELSE() {
2576 IEM_MC_REL_JMP_S8(i8Imm);
2577 } IEM_MC_ENDIF();
2578 IEM_MC_END();
2579 return VINF_SUCCESS;
2580}
2581
2582
2583/**
2584 * @opcode 0x74
2585 */
2586FNIEMOP_DEF(iemOp_je_Jb)
2587{
2588 IEMOP_MNEMONIC(je_Jb, "je/jz Jb");
2589 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2590 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2591 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2592
2593 IEM_MC_BEGIN(0, 0);
2594 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
2595 IEM_MC_REL_JMP_S8(i8Imm);
2596 } IEM_MC_ELSE() {
2597 IEM_MC_ADVANCE_RIP();
2598 } IEM_MC_ENDIF();
2599 IEM_MC_END();
2600 return VINF_SUCCESS;
2601}
2602
2603
2604/**
2605 * @opcode 0x75
2606 */
2607FNIEMOP_DEF(iemOp_jne_Jb)
2608{
2609 IEMOP_MNEMONIC(jne_Jb, "jne/jnz Jb");
2610 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2611 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2612 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2613
2614 IEM_MC_BEGIN(0, 0);
2615 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
2616 IEM_MC_ADVANCE_RIP();
2617 } IEM_MC_ELSE() {
2618 IEM_MC_REL_JMP_S8(i8Imm);
2619 } IEM_MC_ENDIF();
2620 IEM_MC_END();
2621 return VINF_SUCCESS;
2622}
2623
2624
2625/**
2626 * @opcode 0x76
2627 */
2628FNIEMOP_DEF(iemOp_jbe_Jb)
2629{
2630 IEMOP_MNEMONIC(jbe_Jb, "jbe/jna Jb");
2631 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2632 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2633 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2634
2635 IEM_MC_BEGIN(0, 0);
2636 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
2637 IEM_MC_REL_JMP_S8(i8Imm);
2638 } IEM_MC_ELSE() {
2639 IEM_MC_ADVANCE_RIP();
2640 } IEM_MC_ENDIF();
2641 IEM_MC_END();
2642 return VINF_SUCCESS;
2643}
2644
2645
2646/**
2647 * @opcode 0x77
2648 */
2649FNIEMOP_DEF(iemOp_jnbe_Jb)
2650{
2651 IEMOP_MNEMONIC(ja_Jb, "ja/jnbe Jb");
2652 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2653 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2654 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2655
2656 IEM_MC_BEGIN(0, 0);
2657 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
2658 IEM_MC_ADVANCE_RIP();
2659 } IEM_MC_ELSE() {
2660 IEM_MC_REL_JMP_S8(i8Imm);
2661 } IEM_MC_ENDIF();
2662 IEM_MC_END();
2663 return VINF_SUCCESS;
2664}
2665
2666
2667/**
2668 * @opcode 0x78
2669 */
2670FNIEMOP_DEF(iemOp_js_Jb)
2671{
2672 IEMOP_MNEMONIC(js_Jb, "js Jb");
2673 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2674 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2675 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2676
2677 IEM_MC_BEGIN(0, 0);
2678 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
2679 IEM_MC_REL_JMP_S8(i8Imm);
2680 } IEM_MC_ELSE() {
2681 IEM_MC_ADVANCE_RIP();
2682 } IEM_MC_ENDIF();
2683 IEM_MC_END();
2684 return VINF_SUCCESS;
2685}
2686
2687
2688/**
2689 * @opcode 0x79
2690 */
2691FNIEMOP_DEF(iemOp_jns_Jb)
2692{
2693 IEMOP_MNEMONIC(jns_Jb, "jns Jb");
2694 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2695 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2696 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2697
2698 IEM_MC_BEGIN(0, 0);
2699 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
2700 IEM_MC_ADVANCE_RIP();
2701 } IEM_MC_ELSE() {
2702 IEM_MC_REL_JMP_S8(i8Imm);
2703 } IEM_MC_ENDIF();
2704 IEM_MC_END();
2705 return VINF_SUCCESS;
2706}
2707
2708
2709/**
2710 * @opcode 0x7a
2711 */
2712FNIEMOP_DEF(iemOp_jp_Jb)
2713{
2714 IEMOP_MNEMONIC(jp_Jb, "jp Jb");
2715 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2716 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2717 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2718
2719 IEM_MC_BEGIN(0, 0);
2720 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
2721 IEM_MC_REL_JMP_S8(i8Imm);
2722 } IEM_MC_ELSE() {
2723 IEM_MC_ADVANCE_RIP();
2724 } IEM_MC_ENDIF();
2725 IEM_MC_END();
2726 return VINF_SUCCESS;
2727}
2728
2729
2730/**
2731 * @opcode 0x7b
2732 */
2733FNIEMOP_DEF(iemOp_jnp_Jb)
2734{
2735 IEMOP_MNEMONIC(jnp_Jb, "jnp Jb");
2736 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2737 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2738 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2739
2740 IEM_MC_BEGIN(0, 0);
2741 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
2742 IEM_MC_ADVANCE_RIP();
2743 } IEM_MC_ELSE() {
2744 IEM_MC_REL_JMP_S8(i8Imm);
2745 } IEM_MC_ENDIF();
2746 IEM_MC_END();
2747 return VINF_SUCCESS;
2748}
2749
2750
2751/**
2752 * @opcode 0x7c
2753 */
2754FNIEMOP_DEF(iemOp_jl_Jb)
2755{
2756 IEMOP_MNEMONIC(jl_Jb, "jl/jnge Jb");
2757 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2758 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2759 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2760
2761 IEM_MC_BEGIN(0, 0);
2762 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
2763 IEM_MC_REL_JMP_S8(i8Imm);
2764 } IEM_MC_ELSE() {
2765 IEM_MC_ADVANCE_RIP();
2766 } IEM_MC_ENDIF();
2767 IEM_MC_END();
2768 return VINF_SUCCESS;
2769}
2770
2771
2772/**
2773 * @opcode 0x7d
2774 */
2775FNIEMOP_DEF(iemOp_jnl_Jb)
2776{
2777 IEMOP_MNEMONIC(jge_Jb, "jnl/jge Jb");
2778 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2779 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2780 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2781
2782 IEM_MC_BEGIN(0, 0);
2783 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
2784 IEM_MC_ADVANCE_RIP();
2785 } IEM_MC_ELSE() {
2786 IEM_MC_REL_JMP_S8(i8Imm);
2787 } IEM_MC_ENDIF();
2788 IEM_MC_END();
2789 return VINF_SUCCESS;
2790}
2791
2792
2793/**
2794 * @opcode 0x7e
2795 */
2796FNIEMOP_DEF(iemOp_jle_Jb)
2797{
2798 IEMOP_MNEMONIC(jle_Jb, "jle/jng Jb");
2799 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2800 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2801 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2802
2803 IEM_MC_BEGIN(0, 0);
2804 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
2805 IEM_MC_REL_JMP_S8(i8Imm);
2806 } IEM_MC_ELSE() {
2807 IEM_MC_ADVANCE_RIP();
2808 } IEM_MC_ENDIF();
2809 IEM_MC_END();
2810 return VINF_SUCCESS;
2811}
2812
2813
2814/**
2815 * @opcode 0x7f
2816 */
2817FNIEMOP_DEF(iemOp_jnle_Jb)
2818{
2819 IEMOP_MNEMONIC(jg_Jb, "jnle/jg Jb");
2820 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2821 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2822 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2823
2824 IEM_MC_BEGIN(0, 0);
2825 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
2826 IEM_MC_ADVANCE_RIP();
2827 } IEM_MC_ELSE() {
2828 IEM_MC_REL_JMP_S8(i8Imm);
2829 } IEM_MC_ENDIF();
2830 IEM_MC_END();
2831 return VINF_SUCCESS;
2832}
2833
2834
/**
 * @opcode 0x80
 */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_80)
{
    /* Group 1: ADD/OR/ADC/SBB/AND/SUB/XOR/CMP Eb,Ib.  The ModR/M reg field
       selects the operation; the actual arithmetic worker is looked up in
       g_apIemImplGrp1. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: IEMOP_MNEMONIC(add_Eb_Ib, "add Eb,Ib"); break;
        case 1: IEMOP_MNEMONIC(or_Eb_Ib, "or Eb,Ib"); break;
        case 2: IEMOP_MNEMONIC(adc_Eb_Ib, "adc Eb,Ib"); break;
        case 3: IEMOP_MNEMONIC(sbb_Eb_Ib, "sbb Eb,Ib"); break;
        case 4: IEMOP_MNEMONIC(and_Eb_Ib, "and Eb,Ib"); break;
        case 5: IEMOP_MNEMONIC(sub_Eb_Ib, "sub Eb,Ib"); break;
        case 6: IEMOP_MNEMONIC(xor_Eb_Ib, "xor Eb,Ib"); break;
        case 7: IEMOP_MNEMONIC(cmp_Eb_Ib, "cmp Eb,Ib"); break;
    }
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* LOCK only valid with a memory destination */
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        uint32_t fAccess;
        if (pImpl->pfnLockedU8)              /* a NULL locked worker marks a read-only op... */
            fAccess = IEM_ACCESS_DATA_RW;
        else /* CMP */                       /* ...so CMP maps the destination read-only. */
            fAccess = IEM_ACCESS_DATA_R;
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        /* The effective address must be decoded before the immediate (which
           follows the displacement in the instruction stream). */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);
        if (pImpl->pfnLockedU8)
            IEMOP_HLP_DONE_DECODING();
        else
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* LOCK CMP is #UD */

        IEM_MC_MEM_MAP(pu8Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2906
2907
/**
 * @opcode 0x81
 */
FNIEMOP_DEF(iemOp_Grp1_Ev_Iz)
{
    /* Group 1: ADD/OR/ADC/SBB/AND/SUB/XOR/CMP Ev,Iz.  The ModR/M reg field
       selects the worker from g_apIemImplGrp1; the immediate is 16 or 32
       bits (sign-extended to 64) depending on the effective operand size. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: IEMOP_MNEMONIC(add_Ev_Iz, "add Ev,Iz"); break;
        case 1: IEMOP_MNEMONIC(or_Ev_Iz, "or Ev,Iz"); break;
        case 2: IEMOP_MNEMONIC(adc_Ev_Iz, "adc Ev,Iz"); break;
        case 3: IEMOP_MNEMONIC(sbb_Ev_Iz, "sbb Ev,Iz"); break;
        case 4: IEMOP_MNEMONIC(and_Ev_Iz, "and Ev,Iz"); break;
        case 5: IEMOP_MNEMONIC(sub_Ev_Iz, "sub Ev,Iz"); break;
        case 6: IEMOP_MNEMONIC(xor_Ev_Iz, "xor Ev,Iz"); break;
        case 7: IEMOP_MNEMONIC(cmp_Ev_Iz, "cmp Ev,Iz"); break;
    }
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register target */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* LOCK only valid with a memory destination */
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u16Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                uint32_t fAccess;
                if (pImpl->pfnLockedU16)     /* NULL locked worker => read-only op (CMP) */
                    fAccess = IEM_ACCESS_DATA_RW;
                else /* CMP, TEST */
                    fAccess = IEM_ACCESS_DATA_R;
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* Effective address first (the '2' tells the decoder a 2-byte
                   immediate follows the displacement), then the immediate. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* LOCK CMP is #UD */
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_32BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register target */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u32Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /* 32-bit writes zero bits 63:32 */

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                uint32_t fAccess;
                if (pImpl->pfnLockedU32)
                    fAccess = IEM_ACCESS_DATA_RW;
                else /* CMP, TEST */
                    fAccess = IEM_ACCESS_DATA_R;
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* 4-byte immediate follows */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                if (pImpl->pfnLockedU32)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_64BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register target - the immediate is imm32 sign-extended to 64 bits */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u64Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                uint32_t fAccess;
                if (pImpl->pfnLockedU64)
                    fAccess = IEM_ACCESS_DATA_RW;
                else /* CMP */
                    fAccess = IEM_ACCESS_DATA_R;
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* imm32 (sign-extended) follows */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                if (pImpl->pfnLockedU64)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                /* NOTE(review): the ASSIGN is placed after the decoding-done check
                   here, unlike the 16/32-bit paths above — looks harmless since
                   u64Imm is already fetched, but worth confirming intentional. */
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            break;
        }
    }
    return VINF_SUCCESS;
}
3097
3098
/**
 * @opcode 0x82
 * @opmnemonic grp1_82
 * @opgroup op_groups
 */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_82)
{
    /* Opcode 0x82 is an alias of 0x80 (Group 1 Eb,Ib) on older CPUs; it is
       invalid in 64-bit mode, which is enforced before forwarding. */
    IEMOP_HLP_NO_64BIT(); /** @todo do we need to decode the whole instruction or is this ok? */
    return FNIEMOP_CALL(iemOp_Grp1_Eb_Ib_80);
}
3109
3110
/**
 * @opcode 0x83
 */
FNIEMOP_DEF(iemOp_Grp1_Ev_Ib)
{
    /* Group 1: ADD/OR/ADC/SBB/AND/SUB/XOR/CMP Ev,Ib.  The 8-bit immediate
       is sign-extended to the effective operand size before the operation. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: IEMOP_MNEMONIC(add_Ev_Ib, "add Ev,Ib"); break;
        case 1: IEMOP_MNEMONIC(or_Ev_Ib, "or Ev,Ib"); break;
        case 2: IEMOP_MNEMONIC(adc_Ev_Ib, "adc Ev,Ib"); break;
        case 3: IEMOP_MNEMONIC(sbb_Ev_Ib, "sbb Ev,Ib"); break;
        case 4: IEMOP_MNEMONIC(and_Ev_Ib, "and Ev,Ib"); break;
        case 5: IEMOP_MNEMONIC(sub_Ev_Ib, "sub Ev,Ib"); break;
        case 6: IEMOP_MNEMONIC(xor_Ev_Ib, "xor Ev,Ib"); break;
        case 7: IEMOP_MNEMONIC(cmp_Ev_Ib, "cmp Ev,Ib"); break;
    }
    /* Note! Seems the OR, AND, and XOR instructions are present on CPUs prior
       to the 386 even if absent in the intel reference manuals and some
       3rd party opcode listings. */
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register target
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* LOCK only valid with a memory destination */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (int8_t)u8Imm,1); /* sign-extend imm8 */
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (int8_t)u8Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /* 32-bit writes zero bits 63:32 */

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int8_t)u8Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }
        }
    }
    else
    {
        /*
         * Memory target.
         */
        uint32_t fAccess;
        if (pImpl->pfnLockedU16)             /* NULL locked worker => read-only op (CMP) */
            fAccess = IEM_ACCESS_DATA_RW;
        else /* CMP */
            fAccess = IEM_ACCESS_DATA_R;

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* Effective address first ('1' = one immediate byte follows),
                   then the sign-extended imm8. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* LOCK CMP is #UD */
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm);
                if (pImpl->pfnLockedU32)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm);
                if (pImpl->pfnLockedU64)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }
        }
    }
    return VINF_SUCCESS;
}
3295
3296
/**
 * @opcode 0x84
 */
FNIEMOP_DEF(iemOp_test_Eb_Gb)
{
    /* TEST Eb,Gb - AND without writing the result; AF is left undefined
       (as declared to the verifier below).  Delegates to the generic
       rm,r8 binary-operator decoder with the 'test' worker table. */
    IEMOP_MNEMONIC(test_Eb_Gb, "test Eb,Gb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_test);
}
3306
3307
/**
 * @opcode 0x85
 */
FNIEMOP_DEF(iemOp_test_Ev_Gv)
{
    /* TEST Ev,Gv - word/dword/qword form of 0x84; AF undefined.  Delegates
       to the generic rm,rv binary-operator decoder with the 'test' workers. */
    IEMOP_MNEMONIC(test_Ev_Gv, "test Ev,Gv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_test);
}
3317
3318
/**
 * @opcode 0x86
 */
FNIEMOP_DEF(iemOp_xchg_Eb_Gb)
{
    /* XCHG Eb,Gb - exchange byte register with register/memory. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC(xchg_Eb_Gb, "xchg Eb,Gb");

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* LOCK requires a memory operand */

        /* Register form: swap via two temporaries (fetch both, store crossed). */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, uTmp1);
        IEM_MC_LOCAL(uint8_t, uTmp2);

        IEM_MC_FETCH_GREG_U8(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_FETCH_GREG_U8(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         * The memory operand is mapped read/write and swapped in place with
         * the register via the assembly helper.  Note: memory XCHG has
         * implicit LOCK semantics on real hardware — presumably handled by
         * the mapping/worker; TODO confirm.
         */
/** @todo the register must be committed separately! */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint8_t *, pu8Mem, 0);
        IEM_MC_ARG(uint8_t *, pu8Reg, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8, pu8Mem, pu8Reg);
        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Mem, IEM_ACCESS_DATA_RW);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3368
3369
3370/**
3371 * @opcode 0x87
3372 */
3373FNIEMOP_DEF(iemOp_xchg_Ev_Gv)
3374{
3375 IEMOP_MNEMONIC(xchg_Ev_Gv, "xchg Ev,Gv");
3376 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3377
3378 /*
3379 * If rm is denoting a register, no more instruction bytes.
3380 */
3381 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3382 {
3383 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3384
3385 switch (pVCpu->iem.s.enmEffOpSize)
3386 {
3387 case IEMMODE_16BIT:
3388 IEM_MC_BEGIN(0, 2);
3389 IEM_MC_LOCAL(uint16_t, uTmp1);
3390 IEM_MC_LOCAL(uint16_t, uTmp2);
3391
3392 IEM_MC_FETCH_GREG_U16(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3393 IEM_MC_FETCH_GREG_U16(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3394 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
3395 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);
3396
3397 IEM_MC_ADVANCE_RIP();
3398 IEM_MC_END();
3399 return VINF_SUCCESS;
3400
3401 case IEMMODE_32BIT:
3402 IEM_MC_BEGIN(0, 2);
3403 IEM_MC_LOCAL(uint32_t, uTmp1);
3404 IEM_MC_LOCAL(uint32_t, uTmp2);
3405
3406 IEM_MC_FETCH_GREG_U32(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3407 IEM_MC_FETCH_GREG_U32(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3408 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
3409 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);
3410
3411 IEM_MC_ADVANCE_RIP();
3412 IEM_MC_END();
3413 return VINF_SUCCESS;
3414
3415 case IEMMODE_64BIT:
3416 IEM_MC_BEGIN(0, 2);
3417 IEM_MC_LOCAL(uint64_t, uTmp1);
3418 IEM_MC_LOCAL(uint64_t, uTmp2);
3419
3420 IEM_MC_FETCH_GREG_U64(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3421 IEM_MC_FETCH_GREG_U64(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3422 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
3423 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);
3424
3425 IEM_MC_ADVANCE_RIP();
3426 IEM_MC_END();
3427 return VINF_SUCCESS;
3428
3429 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3430 }
3431 }
3432 else
3433 {
3434 /*
3435 * We're accessing memory.
3436 */
3437 switch (pVCpu->iem.s.enmEffOpSize)
3438 {
3439/** @todo the register must be committed separately! */
3440 case IEMMODE_16BIT:
3441 IEM_MC_BEGIN(2, 2);
3442 IEM_MC_ARG(uint16_t *, pu16Mem, 0);
3443 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
3444 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3445
3446 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3447 IEM_MC_MEM_MAP(pu16Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
3448 IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3449 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16, pu16Mem, pu16Reg);
3450 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Mem, IEM_ACCESS_DATA_RW);
3451
3452 IEM_MC_ADVANCE_RIP();
3453 IEM_MC_END();
3454 return VINF_SUCCESS;
3455
3456 case IEMMODE_32BIT:
3457 IEM_MC_BEGIN(2, 2);
3458 IEM_MC_ARG(uint32_t *, pu32Mem, 0);
3459 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
3460 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3461
3462 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3463 IEM_MC_MEM_MAP(pu32Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
3464 IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3465 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32, pu32Mem, pu32Reg);
3466 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Mem, IEM_ACCESS_DATA_RW);
3467
3468 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
3469 IEM_MC_ADVANCE_RIP();
3470 IEM_MC_END();
3471 return VINF_SUCCESS;
3472
3473 case IEMMODE_64BIT:
3474 IEM_MC_BEGIN(2, 2);
3475 IEM_MC_ARG(uint64_t *, pu64Mem, 0);
3476 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
3477 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3478
3479 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3480 IEM_MC_MEM_MAP(pu64Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
3481 IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3482 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64, pu64Mem, pu64Reg);
3483 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Mem, IEM_ACCESS_DATA_RW);
3484
3485 IEM_MC_ADVANCE_RIP();
3486 IEM_MC_END();
3487 return VINF_SUCCESS;
3488
3489 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3490 }
3491 }
3492}
3493
3494
3495/**
3496 * @opcode 0x88
3497 */
3498FNIEMOP_DEF(iemOp_mov_Eb_Gb)
3499{
3500 IEMOP_MNEMONIC(mov_Eb_Gb, "mov Eb,Gb");
3501
3502 uint8_t bRm;
3503 IEM_OPCODE_GET_NEXT_U8(&bRm);
3504
3505 /*
3506 * If rm is denoting a register, no more instruction bytes.
3507 */
3508 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3509 {
3510 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3511 IEM_MC_BEGIN(0, 1);
3512 IEM_MC_LOCAL(uint8_t, u8Value);
3513 IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3514 IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u8Value);
3515 IEM_MC_ADVANCE_RIP();
3516 IEM_MC_END();
3517 }
3518 else
3519 {
3520 /*
3521 * We're writing a register to memory.
3522 */
3523 IEM_MC_BEGIN(0, 2);
3524 IEM_MC_LOCAL(uint8_t, u8Value);
3525 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3526 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3527 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3528 IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3529 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Value);
3530 IEM_MC_ADVANCE_RIP();
3531 IEM_MC_END();
3532 }
3533 return VINF_SUCCESS;
3534
3535}
3536
3537
3538/**
3539 * @opcode 0x89
3540 */
3541FNIEMOP_DEF(iemOp_mov_Ev_Gv)
3542{
3543 IEMOP_MNEMONIC(mov_Ev_Gv, "mov Ev,Gv");
3544
3545 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3546
3547 /*
3548 * If rm is denoting a register, no more instruction bytes.
3549 */
3550 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3551 {
3552 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3553 switch (pVCpu->iem.s.enmEffOpSize)
3554 {
3555 case IEMMODE_16BIT:
3556 IEM_MC_BEGIN(0, 1);
3557 IEM_MC_LOCAL(uint16_t, u16Value);
3558 IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3559 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Value);
3560 IEM_MC_ADVANCE_RIP();
3561 IEM_MC_END();
3562 break;
3563
3564 case IEMMODE_32BIT:
3565 IEM_MC_BEGIN(0, 1);
3566 IEM_MC_LOCAL(uint32_t, u32Value);
3567 IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3568 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Value);
3569 IEM_MC_ADVANCE_RIP();
3570 IEM_MC_END();
3571 break;
3572
3573 case IEMMODE_64BIT:
3574 IEM_MC_BEGIN(0, 1);
3575 IEM_MC_LOCAL(uint64_t, u64Value);
3576 IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3577 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Value);
3578 IEM_MC_ADVANCE_RIP();
3579 IEM_MC_END();
3580 break;
3581 }
3582 }
3583 else
3584 {
3585 /*
3586 * We're writing a register to memory.
3587 */
3588 switch (pVCpu->iem.s.enmEffOpSize)
3589 {
3590 case IEMMODE_16BIT:
3591 IEM_MC_BEGIN(0, 2);
3592 IEM_MC_LOCAL(uint16_t, u16Value);
3593 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3594 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3595 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3596 IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3597 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
3598 IEM_MC_ADVANCE_RIP();
3599 IEM_MC_END();
3600 break;
3601
3602 case IEMMODE_32BIT:
3603 IEM_MC_BEGIN(0, 2);
3604 IEM_MC_LOCAL(uint32_t, u32Value);
3605 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3606 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3607 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3608 IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3609 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
3610 IEM_MC_ADVANCE_RIP();
3611 IEM_MC_END();
3612 break;
3613
3614 case IEMMODE_64BIT:
3615 IEM_MC_BEGIN(0, 2);
3616 IEM_MC_LOCAL(uint64_t, u64Value);
3617 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3618 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3619 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3620 IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3621 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
3622 IEM_MC_ADVANCE_RIP();
3623 IEM_MC_END();
3624 break;
3625 }
3626 }
3627 return VINF_SUCCESS;
3628}
3629
3630
3631/**
3632 * @opcode 0x8a
3633 */
3634FNIEMOP_DEF(iemOp_mov_Gb_Eb)
3635{
3636 IEMOP_MNEMONIC(mov_Gb_Eb, "mov Gb,Eb");
3637
3638 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3639
3640 /*
3641 * If rm is denoting a register, no more instruction bytes.
3642 */
3643 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3644 {
3645 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3646 IEM_MC_BEGIN(0, 1);
3647 IEM_MC_LOCAL(uint8_t, u8Value);
3648 IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3649 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8Value);
3650 IEM_MC_ADVANCE_RIP();
3651 IEM_MC_END();
3652 }
3653 else
3654 {
3655 /*
3656 * We're loading a register from memory.
3657 */
3658 IEM_MC_BEGIN(0, 2);
3659 IEM_MC_LOCAL(uint8_t, u8Value);
3660 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3661 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3662 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3663 IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3664 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8Value);
3665 IEM_MC_ADVANCE_RIP();
3666 IEM_MC_END();
3667 }
3668 return VINF_SUCCESS;
3669}
3670
3671
3672/**
3673 * @opcode 0x8b
3674 */
3675FNIEMOP_DEF(iemOp_mov_Gv_Ev)
3676{
3677 IEMOP_MNEMONIC(mov_Gv_Ev, "mov Gv,Ev");
3678
3679 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3680
3681 /*
3682 * If rm is denoting a register, no more instruction bytes.
3683 */
3684 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3685 {
3686 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3687 switch (pVCpu->iem.s.enmEffOpSize)
3688 {
3689 case IEMMODE_16BIT:
3690 IEM_MC_BEGIN(0, 1);
3691 IEM_MC_LOCAL(uint16_t, u16Value);
3692 IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3693 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
3694 IEM_MC_ADVANCE_RIP();
3695 IEM_MC_END();
3696 break;
3697
3698 case IEMMODE_32BIT:
3699 IEM_MC_BEGIN(0, 1);
3700 IEM_MC_LOCAL(uint32_t, u32Value);
3701 IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3702 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
3703 IEM_MC_ADVANCE_RIP();
3704 IEM_MC_END();
3705 break;
3706
3707 case IEMMODE_64BIT:
3708 IEM_MC_BEGIN(0, 1);
3709 IEM_MC_LOCAL(uint64_t, u64Value);
3710 IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3711 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
3712 IEM_MC_ADVANCE_RIP();
3713 IEM_MC_END();
3714 break;
3715 }
3716 }
3717 else
3718 {
3719 /*
3720 * We're loading a register from memory.
3721 */
3722 switch (pVCpu->iem.s.enmEffOpSize)
3723 {
3724 case IEMMODE_16BIT:
3725 IEM_MC_BEGIN(0, 2);
3726 IEM_MC_LOCAL(uint16_t, u16Value);
3727 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3728 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3729 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3730 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3731 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
3732 IEM_MC_ADVANCE_RIP();
3733 IEM_MC_END();
3734 break;
3735
3736 case IEMMODE_32BIT:
3737 IEM_MC_BEGIN(0, 2);
3738 IEM_MC_LOCAL(uint32_t, u32Value);
3739 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3740 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3741 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3742 IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3743 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
3744 IEM_MC_ADVANCE_RIP();
3745 IEM_MC_END();
3746 break;
3747
3748 case IEMMODE_64BIT:
3749 IEM_MC_BEGIN(0, 2);
3750 IEM_MC_LOCAL(uint64_t, u64Value);
3751 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3752 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3753 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3754 IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3755 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
3756 IEM_MC_ADVANCE_RIP();
3757 IEM_MC_END();
3758 break;
3759 }
3760 }
3761 return VINF_SUCCESS;
3762}
3763
3764
3765/**
3766 * opcode 0x63
3767 * @todo Table fixme
3768 */
3769FNIEMOP_DEF(iemOp_arpl_Ew_Gw_movsx_Gv_Ev)
3770{
3771 if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
3772 return FNIEMOP_CALL(iemOp_arpl_Ew_Gw);
3773 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
3774 return FNIEMOP_CALL(iemOp_mov_Gv_Ev);
3775 return FNIEMOP_CALL(iemOp_movsxd_Gv_Ev);
3776}
3777
3778
3779/**
3780 * @opcode 0x8c
3781 */
3782FNIEMOP_DEF(iemOp_mov_Ev_Sw)
3783{
3784 IEMOP_MNEMONIC(mov_Ev_Sw, "mov Ev,Sw");
3785
3786 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3787
3788 /*
3789 * Check that the destination register exists. The REX.R prefix is ignored.
3790 */
3791 uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3792 if ( iSegReg > X86_SREG_GS)
3793 return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
3794
3795 /*
3796 * If rm is denoting a register, no more instruction bytes.
3797 * In that case, the operand size is respected and the upper bits are
3798 * cleared (starting with some pentium).
3799 */
3800 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3801 {
3802 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3803 switch (pVCpu->iem.s.enmEffOpSize)
3804 {
3805 case IEMMODE_16BIT:
3806 IEM_MC_BEGIN(0, 1);
3807 IEM_MC_LOCAL(uint16_t, u16Value);
3808 IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
3809 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Value);
3810 IEM_MC_ADVANCE_RIP();
3811 IEM_MC_END();
3812 break;
3813
3814 case IEMMODE_32BIT:
3815 IEM_MC_BEGIN(0, 1);
3816 IEM_MC_LOCAL(uint32_t, u32Value);
3817 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iSegReg);
3818 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Value);
3819 IEM_MC_ADVANCE_RIP();
3820 IEM_MC_END();
3821 break;
3822
3823 case IEMMODE_64BIT:
3824 IEM_MC_BEGIN(0, 1);
3825 IEM_MC_LOCAL(uint64_t, u64Value);
3826 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iSegReg);
3827 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Value);
3828 IEM_MC_ADVANCE_RIP();
3829 IEM_MC_END();
3830 break;
3831 }
3832 }
3833 else
3834 {
3835 /*
3836 * We're saving the register to memory. The access is word sized
3837 * regardless of operand size prefixes.
3838 */
3839#if 0 /* not necessary */
3840 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
3841#endif
3842 IEM_MC_BEGIN(0, 2);
3843 IEM_MC_LOCAL(uint16_t, u16Value);
3844 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3845 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3846 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3847 IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
3848 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
3849 IEM_MC_ADVANCE_RIP();
3850 IEM_MC_END();
3851 }
3852 return VINF_SUCCESS;
3853}
3854
3855
3856
3857
3858/**
3859 * @opcode 0x8d
3860 */
3861FNIEMOP_DEF(iemOp_lea_Gv_M)
3862{
3863 IEMOP_MNEMONIC(lea_Gv_M, "lea Gv,M");
3864 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3865 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3866 return IEMOP_RAISE_INVALID_OPCODE(); /* no register form */
3867
3868 switch (pVCpu->iem.s.enmEffOpSize)
3869 {
3870 case IEMMODE_16BIT:
3871 IEM_MC_BEGIN(0, 2);
3872 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3873 IEM_MC_LOCAL(uint16_t, u16Cast);
3874 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3875 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3876 IEM_MC_ASSIGN_TO_SMALLER(u16Cast, GCPtrEffSrc);
3877 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Cast);
3878 IEM_MC_ADVANCE_RIP();
3879 IEM_MC_END();
3880 return VINF_SUCCESS;
3881
3882 case IEMMODE_32BIT:
3883 IEM_MC_BEGIN(0, 2);
3884 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3885 IEM_MC_LOCAL(uint32_t, u32Cast);
3886 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3887 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3888 IEM_MC_ASSIGN_TO_SMALLER(u32Cast, GCPtrEffSrc);
3889 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Cast);
3890 IEM_MC_ADVANCE_RIP();
3891 IEM_MC_END();
3892 return VINF_SUCCESS;
3893
3894 case IEMMODE_64BIT:
3895 IEM_MC_BEGIN(0, 1);
3896 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3897 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3898 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3899 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, GCPtrEffSrc);
3900 IEM_MC_ADVANCE_RIP();
3901 IEM_MC_END();
3902 return VINF_SUCCESS;
3903 }
3904 AssertFailedReturn(VERR_IEM_IPE_7);
3905}
3906
3907
3908/**
3909 * @opcode 0x8e
3910 */
3911FNIEMOP_DEF(iemOp_mov_Sw_Ev)
3912{
3913 IEMOP_MNEMONIC(mov_Sw_Ev, "mov Sw,Ev");
3914
3915 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3916
3917 /*
3918 * The practical operand size is 16-bit.
3919 */
3920#if 0 /* not necessary */
3921 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
3922#endif
3923
3924 /*
3925 * Check that the destination register exists and can be used with this
3926 * instruction. The REX.R prefix is ignored.
3927 */
3928 uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3929 if ( iSegReg == X86_SREG_CS
3930 || iSegReg > X86_SREG_GS)
3931 return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
3932
3933 /*
3934 * If rm is denoting a register, no more instruction bytes.
3935 */
3936 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3937 {
3938 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3939 IEM_MC_BEGIN(2, 0);
3940 IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
3941 IEM_MC_ARG(uint16_t, u16Value, 1);
3942 IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3943 IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
3944 IEM_MC_END();
3945 }
3946 else
3947 {
3948 /*
3949 * We're loading the register from memory. The access is word sized
3950 * regardless of operand size prefixes.
3951 */
3952 IEM_MC_BEGIN(2, 1);
3953 IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
3954 IEM_MC_ARG(uint16_t, u16Value, 1);
3955 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3956 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3957 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3958 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3959 IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
3960 IEM_MC_END();
3961 }
3962 return VINF_SUCCESS;
3963}
3964
3965
/** Opcode 0x8f /0.
 *
 * 'pop Ev' - pop a word/dword/qword off the stack into a register or
 * memory operand.  The register form shares the common pop-GReg helper;
 * the memory form is handled inline (non-MC) because RSP must be updated
 * before the effective address is calculated.
 */
FNIEMOP_DEF_1(iemOp_pop_Ev, uint8_t, bRm)
{
    /* This bugger is rather annoying as it requires rSP to be updated before
       doing the effective address calculations.  Will eventually require a
       split between the R/M+SIB decoding and the effective address
       calculation - which is something that is required for any attempt at
       reusing this code for a recompiler.  It may also be good to have if we
       need to delay #UD exception caused by invalid lock prefixes.

       For now, we'll do a mostly safe interpreter-only implementation here. */
    /** @todo What's the deal with the 'reg' field and pop Ev?  Ignorning it for
     *        now until tests show it's checked.. */
    IEMOP_MNEMONIC(pop_Ev, "pop Ev");

    /* Register access is relatively easy and can share code. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(iemOpCommonPopGReg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);

    /*
     * Memory target.
     *
     * Intel says that RSP is incremented before it's used in any effective
     * address calcuations.  This means some serious extra annoyance here since
     * we decode and calculate the effective address in one step and like to
     * delay committing registers till everything is done.
     *
     * So, we'll decode and calculate the effective address twice.  This will
     * require some recoding if turned into a recompiler.
     */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* The common code does this differently. */

#ifndef TST_IEM_CHECK_MC
    /* Calc effective address with modified ESP. */
/** @todo testcase */
    PCPUMCTX pCtx = IEM_GET_CTX(pVCpu);
    RTGCPTR GCPtrEff;
    VBOXSTRICTRC rcStrict;
    /* The last argument is the number of bytes RSP is advanced by before the
       effective address calculation (operand size dependent). */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 2); break;
        case IEMMODE_32BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 4); break;
        case IEMMODE_64BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 8); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict != VINF_SUCCESS)
        return rcStrict;
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* Perform the operation - this should be CImpl. */
    RTUINT64U TmpRsp;
    TmpRsp.u = pCtx->rsp; /* work on a copy so rSP is only committed on success. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Value;
            rcStrict = iemMemStackPopU16Ex(pVCpu, &u16Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU16(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u16Value);
            break;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Value;
            rcStrict = iemMemStackPopU32Ex(pVCpu, &u32Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU32(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u32Value);
            break;
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Value;
            rcStrict = iemMemStackPopU64Ex(pVCpu, &u64Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU64(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u64Value);
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict == VINF_SUCCESS)
    {
        pCtx->rsp = TmpRsp.u; /* commit the stack pointer only now. */
        iemRegUpdateRipAndClearRF(pVCpu);
    }
    return rcStrict;

#else
    return VERR_IEM_IPE_2;
#endif
}
4060
4061
4062/**
4063 * @opcode 0x8f
4064 */
4065FNIEMOP_DEF(iemOp_Grp1A__xop)
4066{
4067 /*
4068 * AMD has defined /1 thru /7 as XOP prefix. The prefix is similar to the
4069 * three byte VEX prefix, except that the mmmmm field cannot have the values
4070 * 0 thru 7, because it would then be confused with pop Ev (modrm.reg == 0).
4071 */
4072 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4073 if ((bRm & X86_MODRM_REG_MASK) == (0 << X86_MODRM_REG_SHIFT)) /* /0 */
4074 return FNIEMOP_CALL_1(iemOp_pop_Ev, bRm);
4075
4076 IEMOP_MNEMONIC(xop, "xop");
4077 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXop)
4078 {
4079 /** @todo Test when exctly the XOP conformance checks kick in during
4080 * instruction decoding and fetching (using \#PF). */
4081 uint8_t bXop2; IEM_OPCODE_GET_NEXT_U8(&bXop2);
4082 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
4083 if ( ( pVCpu->iem.s.fPrefixes
4084 & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_LOCK | IEM_OP_PRF_REX))
4085 == 0)
4086 {
4087 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_XOP;
4088 if (bXop2 & 0x80 /* XOP.W */)
4089 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
4090 pVCpu->iem.s.uRexReg = ~bRm >> (7 - 3);
4091 pVCpu->iem.s.uRexIndex = ~bRm >> (6 - 3);
4092 pVCpu->iem.s.uRexB = ~bRm >> (5 - 3);
4093 pVCpu->iem.s.uVex3rdReg = (~bXop2 >> 3) & 0xf;
4094 pVCpu->iem.s.uVexLength = (bXop2 >> 2) & 1;
4095 pVCpu->iem.s.idxPrefix = bXop2 & 0x3;
4096
4097 /** @todo XOP: Just use new tables and decoders. */
4098 switch (bRm & 0x1f)
4099 {
4100 case 8: /* xop opcode map 8. */
4101 IEMOP_BITCH_ABOUT_STUB();
4102 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
4103
4104 case 9: /* xop opcode map 9. */
4105 IEMOP_BITCH_ABOUT_STUB();
4106 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
4107
4108 case 10: /* xop opcode map 10. */
4109 IEMOP_BITCH_ABOUT_STUB();
4110 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
4111
4112 default:
4113 Log(("XOP: Invalid vvvv value: %#x!\n", bRm & 0x1f));
4114 return IEMOP_RAISE_INVALID_OPCODE();
4115 }
4116 }
4117 else
4118 Log(("XOP: Invalid prefix mix!\n"));
4119 }
4120 else
4121 Log(("XOP: XOP support disabled!\n"));
4122 return IEMOP_RAISE_INVALID_OPCODE();
4123}
4124
4125
4126/**
4127 * Common 'xchg reg,rAX' helper.
4128 */
4129FNIEMOP_DEF_1(iemOpCommonXchgGRegRax, uint8_t, iReg)
4130{
4131 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4132
4133 iReg |= pVCpu->iem.s.uRexB;
4134 switch (pVCpu->iem.s.enmEffOpSize)
4135 {
4136 case IEMMODE_16BIT:
4137 IEM_MC_BEGIN(0, 2);
4138 IEM_MC_LOCAL(uint16_t, u16Tmp1);
4139 IEM_MC_LOCAL(uint16_t, u16Tmp2);
4140 IEM_MC_FETCH_GREG_U16(u16Tmp1, iReg);
4141 IEM_MC_FETCH_GREG_U16(u16Tmp2, X86_GREG_xAX);
4142 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp1);
4143 IEM_MC_STORE_GREG_U16(iReg, u16Tmp2);
4144 IEM_MC_ADVANCE_RIP();
4145 IEM_MC_END();
4146 return VINF_SUCCESS;
4147
4148 case IEMMODE_32BIT:
4149 IEM_MC_BEGIN(0, 2);
4150 IEM_MC_LOCAL(uint32_t, u32Tmp1);
4151 IEM_MC_LOCAL(uint32_t, u32Tmp2);
4152 IEM_MC_FETCH_GREG_U32(u32Tmp1, iReg);
4153 IEM_MC_FETCH_GREG_U32(u32Tmp2, X86_GREG_xAX);
4154 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp1);
4155 IEM_MC_STORE_GREG_U32(iReg, u32Tmp2);
4156 IEM_MC_ADVANCE_RIP();
4157 IEM_MC_END();
4158 return VINF_SUCCESS;
4159
4160 case IEMMODE_64BIT:
4161 IEM_MC_BEGIN(0, 2);
4162 IEM_MC_LOCAL(uint64_t, u64Tmp1);
4163 IEM_MC_LOCAL(uint64_t, u64Tmp2);
4164 IEM_MC_FETCH_GREG_U64(u64Tmp1, iReg);
4165 IEM_MC_FETCH_GREG_U64(u64Tmp2, X86_GREG_xAX);
4166 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp1);
4167 IEM_MC_STORE_GREG_U64(iReg, u64Tmp2);
4168 IEM_MC_ADVANCE_RIP();
4169 IEM_MC_END();
4170 return VINF_SUCCESS;
4171
4172 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4173 }
4174}
4175
4176
4177/**
4178 * @opcode 0x90
4179 */
4180FNIEMOP_DEF(iemOp_nop)
4181{
4182 /* R8/R8D and RAX/EAX can be exchanged. */
4183 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_B)
4184 {
4185 IEMOP_MNEMONIC(xchg_r8_rAX, "xchg r8,rAX");
4186 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xAX);
4187 }
4188
4189 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
4190 IEMOP_MNEMONIC(pause, "pause");
4191 else
4192 IEMOP_MNEMONIC(nop, "nop");
4193 IEM_MC_BEGIN(0, 0);
4194 IEM_MC_ADVANCE_RIP();
4195 IEM_MC_END();
4196 return VINF_SUCCESS;
4197}
4198
4199
4200/**
4201 * @opcode 0x91
4202 */
4203FNIEMOP_DEF(iemOp_xchg_eCX_eAX)
4204{
4205 IEMOP_MNEMONIC(xchg_rCX_rAX, "xchg rCX,rAX");
4206 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xCX);
4207}
4208
4209
4210/**
4211 * @opcode 0x92
4212 */
4213FNIEMOP_DEF(iemOp_xchg_eDX_eAX)
4214{
4215 IEMOP_MNEMONIC(xchg_rDX_rAX, "xchg rDX,rAX");
4216 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDX);
4217}
4218
4219
4220/**
4221 * @opcode 0x93
4222 */
4223FNIEMOP_DEF(iemOp_xchg_eBX_eAX)
4224{
4225 IEMOP_MNEMONIC(xchg_rBX_rAX, "xchg rBX,rAX");
4226 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBX);
4227}
4228
4229
4230/**
4231 * @opcode 0x94
4232 */
4233FNIEMOP_DEF(iemOp_xchg_eSP_eAX)
4234{
4235 IEMOP_MNEMONIC(xchg_rSX_rAX, "xchg rSX,rAX");
4236 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSP);
4237}
4238
4239
4240/**
4241 * @opcode 0x95
4242 */
4243FNIEMOP_DEF(iemOp_xchg_eBP_eAX)
4244{
4245 IEMOP_MNEMONIC(xchg_rBP_rAX, "xchg rBP,rAX");
4246 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBP);
4247}
4248
4249
4250/**
4251 * @opcode 0x96
4252 */
4253FNIEMOP_DEF(iemOp_xchg_eSI_eAX)
4254{
4255 IEMOP_MNEMONIC(xchg_rSI_rAX, "xchg rSI,rAX");
4256 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSI);
4257}
4258
4259
4260/**
4261 * @opcode 0x97
4262 */
4263FNIEMOP_DEF(iemOp_xchg_eDI_eAX)
4264{
4265 IEMOP_MNEMONIC(xchg_rDI_rAX, "xchg rDI,rAX");
4266 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDI);
4267}
4268
4269
4270/**
4271 * @opcode 0x98
4272 */
4273FNIEMOP_DEF(iemOp_cbw)
4274{
4275 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4276 switch (pVCpu->iem.s.enmEffOpSize)
4277 {
4278 case IEMMODE_16BIT:
4279 IEMOP_MNEMONIC(cbw, "cbw");
4280 IEM_MC_BEGIN(0, 1);
4281 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 7) {
4282 IEM_MC_OR_GREG_U16(X86_GREG_xAX, UINT16_C(0xff00));
4283 } IEM_MC_ELSE() {
4284 IEM_MC_AND_GREG_U16(X86_GREG_xAX, UINT16_C(0x00ff));
4285 } IEM_MC_ENDIF();
4286 IEM_MC_ADVANCE_RIP();
4287 IEM_MC_END();
4288 return VINF_SUCCESS;
4289
4290 case IEMMODE_32BIT:
4291 IEMOP_MNEMONIC(cwde, "cwde");
4292 IEM_MC_BEGIN(0, 1);
4293 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
4294 IEM_MC_OR_GREG_U32(X86_GREG_xAX, UINT32_C(0xffff0000));
4295 } IEM_MC_ELSE() {
4296 IEM_MC_AND_GREG_U32(X86_GREG_xAX, UINT32_C(0x0000ffff));
4297 } IEM_MC_ENDIF();
4298 IEM_MC_ADVANCE_RIP();
4299 IEM_MC_END();
4300 return VINF_SUCCESS;
4301
4302 case IEMMODE_64BIT:
4303 IEMOP_MNEMONIC(cdqe, "cdqe");
4304 IEM_MC_BEGIN(0, 1);
4305 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
4306 IEM_MC_OR_GREG_U64(X86_GREG_xAX, UINT64_C(0xffffffff00000000));
4307 } IEM_MC_ELSE() {
4308 IEM_MC_AND_GREG_U64(X86_GREG_xAX, UINT64_C(0x00000000ffffffff));
4309 } IEM_MC_ENDIF();
4310 IEM_MC_ADVANCE_RIP();
4311 IEM_MC_END();
4312 return VINF_SUCCESS;
4313
4314 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4315 }
4316}
4317
4318
4319/**
4320 * @opcode 0x99
4321 */
4322FNIEMOP_DEF(iemOp_cwd)
4323{
4324 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4325 switch (pVCpu->iem.s.enmEffOpSize)
4326 {
4327 case IEMMODE_16BIT:
4328 IEMOP_MNEMONIC(cwd, "cwd");
4329 IEM_MC_BEGIN(0, 1);
4330 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
4331 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, UINT16_C(0xffff));
4332 } IEM_MC_ELSE() {
4333 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, 0);
4334 } IEM_MC_ENDIF();
4335 IEM_MC_ADVANCE_RIP();
4336 IEM_MC_END();
4337 return VINF_SUCCESS;
4338
4339 case IEMMODE_32BIT:
4340 IEMOP_MNEMONIC(cdq, "cdq");
4341 IEM_MC_BEGIN(0, 1);
4342 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
4343 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, UINT32_C(0xffffffff));
4344 } IEM_MC_ELSE() {
4345 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, 0);
4346 } IEM_MC_ENDIF();
4347 IEM_MC_ADVANCE_RIP();
4348 IEM_MC_END();
4349 return VINF_SUCCESS;
4350
4351 case IEMMODE_64BIT:
4352 IEMOP_MNEMONIC(cqo, "cqo");
4353 IEM_MC_BEGIN(0, 1);
4354 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 63) {
4355 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, UINT64_C(0xffffffffffffffff));
4356 } IEM_MC_ELSE() {
4357 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, 0);
4358 } IEM_MC_ENDIF();
4359 IEM_MC_ADVANCE_RIP();
4360 IEM_MC_END();
4361 return VINF_SUCCESS;
4362
4363 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4364 }
4365}
4366
4367
4368/**
4369 * @opcode 0x9a
4370 */
4371FNIEMOP_DEF(iemOp_call_Ap)
4372{
4373 IEMOP_MNEMONIC(call_Ap, "call Ap");
4374 IEMOP_HLP_NO_64BIT();
4375
4376 /* Decode the far pointer address and pass it on to the far call C implementation. */
4377 uint32_t offSeg;
4378 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
4379 IEM_OPCODE_GET_NEXT_U32(&offSeg);
4380 else
4381 IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
4382 uint16_t uSel; IEM_OPCODE_GET_NEXT_U16(&uSel);
4383 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4384 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_callf, uSel, offSeg, pVCpu->iem.s.enmEffOpSize);
4385}
4386
4387
/** Opcode 0x9b. (aka fwait)
 *
 * WAIT/FWAIT - may raise \#NM (device not available) or a pending x87
 * exception; otherwise it is a no-op that just advances RIP. */
FNIEMOP_DEF(iemOp_wait)
{
    IEMOP_MNEMONIC(wait, "wait");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_WAIT_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
4401
4402
4403/**
4404 * @opcode 0x9c
4405 */
4406FNIEMOP_DEF(iemOp_pushf_Fv)
4407{
4408 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4409 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4410 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_pushf, pVCpu->iem.s.enmEffOpSize);
4411}
4412
4413
4414/**
4415 * @opcode 0x9d
4416 */
4417FNIEMOP_DEF(iemOp_popf_Fv)
4418{
4419 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4420 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4421 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_popf, pVCpu->iem.s.enmEffOpSize);
4422}
4423
4424
4425/**
4426 * @opcode 0x9e
4427 */
4428FNIEMOP_DEF(iemOp_sahf)
4429{
4430 IEMOP_MNEMONIC(sahf, "sahf");
4431 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4432 if ( pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
4433 && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
4434 return IEMOP_RAISE_INVALID_OPCODE();
4435 IEM_MC_BEGIN(0, 2);
4436 IEM_MC_LOCAL(uint32_t, u32Flags);
4437 IEM_MC_LOCAL(uint32_t, EFlags);
4438 IEM_MC_FETCH_EFLAGS(EFlags);
4439 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Flags, X86_GREG_xSP/*=AH*/);
4440 IEM_MC_AND_LOCAL_U32(u32Flags, X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
4441 IEM_MC_AND_LOCAL_U32(EFlags, UINT32_C(0xffffff00));
4442 IEM_MC_OR_LOCAL_U32(u32Flags, X86_EFL_1);
4443 IEM_MC_OR_2LOCS_U32(EFlags, u32Flags);
4444 IEM_MC_COMMIT_EFLAGS(EFlags);
4445 IEM_MC_ADVANCE_RIP();
4446 IEM_MC_END();
4447 return VINF_SUCCESS;
4448}
4449
4450
4451/**
4452 * @opcode 0x9f
4453 */
4454FNIEMOP_DEF(iemOp_lahf)
4455{
4456 IEMOP_MNEMONIC(lahf, "lahf");
4457 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4458 if ( pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
4459 && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
4460 return IEMOP_RAISE_INVALID_OPCODE();
4461 IEM_MC_BEGIN(0, 1);
4462 IEM_MC_LOCAL(uint8_t, u8Flags);
4463 IEM_MC_FETCH_EFLAGS_U8(u8Flags);
4464 IEM_MC_STORE_GREG_U8(X86_GREG_xSP/*=AH*/, u8Flags);
4465 IEM_MC_ADVANCE_RIP();
4466 IEM_MC_END();
4467 return VINF_SUCCESS;
4468}
4469
4470
4471/**
4472 * Macro used by iemOp_mov_AL_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL and
4473 * iemOp_mov_Ov_rAX to fetch the moffsXX bit of the opcode and fend of lock
4474 * prefixes. Will return on failures.
4475 * @param a_GCPtrMemOff The variable to store the offset in.
4476 */
4477#define IEMOP_FETCH_MOFFS_XX(a_GCPtrMemOff) \
4478 do \
4479 { \
4480 switch (pVCpu->iem.s.enmEffAddrMode) \
4481 { \
4482 case IEMMODE_16BIT: \
4483 IEM_OPCODE_GET_NEXT_U16_ZX_U64(&(a_GCPtrMemOff)); \
4484 break; \
4485 case IEMMODE_32BIT: \
4486 IEM_OPCODE_GET_NEXT_U32_ZX_U64(&(a_GCPtrMemOff)); \
4487 break; \
4488 case IEMMODE_64BIT: \
4489 IEM_OPCODE_GET_NEXT_U64(&(a_GCPtrMemOff)); \
4490 break; \
4491 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4492 } \
4493 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4494 } while (0)
4495
4496/**
4497 * @opcode 0xa0
4498 */
4499FNIEMOP_DEF(iemOp_mov_AL_Ob)
4500{
4501 /*
4502 * Get the offset and fend of lock prefixes.
4503 */
4504 RTGCPTR GCPtrMemOff;
4505 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
4506
4507 /*
4508 * Fetch AL.
4509 */
4510 IEM_MC_BEGIN(0,1);
4511 IEM_MC_LOCAL(uint8_t, u8Tmp);
4512 IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
4513 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
4514 IEM_MC_ADVANCE_RIP();
4515 IEM_MC_END();
4516 return VINF_SUCCESS;
4517}
4518
4519
4520/**
4521 * @opcode 0xa1
4522 */
4523FNIEMOP_DEF(iemOp_mov_rAX_Ov)
4524{
4525 /*
4526 * Get the offset and fend of lock prefixes.
4527 */
4528 IEMOP_MNEMONIC(mov_rAX_Ov, "mov rAX,Ov");
4529 RTGCPTR GCPtrMemOff;
4530 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
4531
4532 /*
4533 * Fetch rAX.
4534 */
4535 switch (pVCpu->iem.s.enmEffOpSize)
4536 {
4537 case IEMMODE_16BIT:
4538 IEM_MC_BEGIN(0,1);
4539 IEM_MC_LOCAL(uint16_t, u16Tmp);
4540 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
4541 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
4542 IEM_MC_ADVANCE_RIP();
4543 IEM_MC_END();
4544 return VINF_SUCCESS;
4545
4546 case IEMMODE_32BIT:
4547 IEM_MC_BEGIN(0,1);
4548 IEM_MC_LOCAL(uint32_t, u32Tmp);
4549 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
4550 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp);
4551 IEM_MC_ADVANCE_RIP();
4552 IEM_MC_END();
4553 return VINF_SUCCESS;
4554
4555 case IEMMODE_64BIT:
4556 IEM_MC_BEGIN(0,1);
4557 IEM_MC_LOCAL(uint64_t, u64Tmp);
4558 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
4559 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp);
4560 IEM_MC_ADVANCE_RIP();
4561 IEM_MC_END();
4562 return VINF_SUCCESS;
4563
4564 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4565 }
4566}
4567
4568
4569/**
4570 * @opcode 0xa2
4571 */
4572FNIEMOP_DEF(iemOp_mov_Ob_AL)
4573{
4574 /*
4575 * Get the offset and fend of lock prefixes.
4576 */
4577 RTGCPTR GCPtrMemOff;
4578 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
4579
4580 /*
4581 * Store AL.
4582 */
4583 IEM_MC_BEGIN(0,1);
4584 IEM_MC_LOCAL(uint8_t, u8Tmp);
4585 IEM_MC_FETCH_GREG_U8(u8Tmp, X86_GREG_xAX);
4586 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u8Tmp);
4587 IEM_MC_ADVANCE_RIP();
4588 IEM_MC_END();
4589 return VINF_SUCCESS;
4590}
4591
4592
4593/**
4594 * @opcode 0xa3
4595 */
4596FNIEMOP_DEF(iemOp_mov_Ov_rAX)
4597{
4598 /*
4599 * Get the offset and fend of lock prefixes.
4600 */
4601 RTGCPTR GCPtrMemOff;
4602 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
4603
4604 /*
4605 * Store rAX.
4606 */
4607 switch (pVCpu->iem.s.enmEffOpSize)
4608 {
4609 case IEMMODE_16BIT:
4610 IEM_MC_BEGIN(0,1);
4611 IEM_MC_LOCAL(uint16_t, u16Tmp);
4612 IEM_MC_FETCH_GREG_U16(u16Tmp, X86_GREG_xAX);
4613 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u16Tmp);
4614 IEM_MC_ADVANCE_RIP();
4615 IEM_MC_END();
4616 return VINF_SUCCESS;
4617
4618 case IEMMODE_32BIT:
4619 IEM_MC_BEGIN(0,1);
4620 IEM_MC_LOCAL(uint32_t, u32Tmp);
4621 IEM_MC_FETCH_GREG_U32(u32Tmp, X86_GREG_xAX);
4622 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u32Tmp);
4623 IEM_MC_ADVANCE_RIP();
4624 IEM_MC_END();
4625 return VINF_SUCCESS;
4626
4627 case IEMMODE_64BIT:
4628 IEM_MC_BEGIN(0,1);
4629 IEM_MC_LOCAL(uint64_t, u64Tmp);
4630 IEM_MC_FETCH_GREG_U64(u64Tmp, X86_GREG_xAX);
4631 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u64Tmp);
4632 IEM_MC_ADVANCE_RIP();
4633 IEM_MC_END();
4634 return VINF_SUCCESS;
4635
4636 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4637 }
4638}
4639
/** Macro used by iemOp_movsb_Xb_Yb and iemOp_movswd_Xv_Yv.
 *
 * Emits the microcode for a single (non-REP) MOVS iteration: load ValBits
 * bits from DS:rSI (segment overridable, iEffSeg), store to ES:rDI, then
 * advance or rewind both index registers by ValBits/8 according to EFLAGS.DF.
 * AddrBits selects how the index registers are read/updated. */
#define IEM_MOVS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END();
4658
4659/**
4660 * @opcode 0xa4
4661 */
4662FNIEMOP_DEF(iemOp_movsb_Xb_Yb)
4663{
4664 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4665
4666 /*
4667 * Use the C implementation if a repeat prefix is encountered.
4668 */
4669 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
4670 {
4671 IEMOP_MNEMONIC(rep_movsb_Xb_Yb, "rep movsb Xb,Yb");
4672 switch (pVCpu->iem.s.enmEffAddrMode)
4673 {
4674 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr16, pVCpu->iem.s.iEffSeg);
4675 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr32, pVCpu->iem.s.iEffSeg);
4676 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr64, pVCpu->iem.s.iEffSeg);
4677 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4678 }
4679 }
4680 IEMOP_MNEMONIC(movsb_Xb_Yb, "movsb Xb,Yb");
4681
4682 /*
4683 * Sharing case implementation with movs[wdq] below.
4684 */
4685 switch (pVCpu->iem.s.enmEffAddrMode)
4686 {
4687 case IEMMODE_16BIT: IEM_MOVS_CASE(8, 16); break;
4688 case IEMMODE_32BIT: IEM_MOVS_CASE(8, 32); break;
4689 case IEMMODE_64BIT: IEM_MOVS_CASE(8, 64); break;
4690 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4691 }
4692 return VINF_SUCCESS;
4693}
4694
4695
4696/**
4697 * @opcode 0xa5
4698 */
4699FNIEMOP_DEF(iemOp_movswd_Xv_Yv)
4700{
4701 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4702
4703 /*
4704 * Use the C implementation if a repeat prefix is encountered.
4705 */
4706 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
4707 {
4708 IEMOP_MNEMONIC(rep_movs_Xv_Yv, "rep movs Xv,Yv");
4709 switch (pVCpu->iem.s.enmEffOpSize)
4710 {
4711 case IEMMODE_16BIT:
4712 switch (pVCpu->iem.s.enmEffAddrMode)
4713 {
4714 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr16, pVCpu->iem.s.iEffSeg);
4715 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr32, pVCpu->iem.s.iEffSeg);
4716 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr64, pVCpu->iem.s.iEffSeg);
4717 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4718 }
4719 break;
4720 case IEMMODE_32BIT:
4721 switch (pVCpu->iem.s.enmEffAddrMode)
4722 {
4723 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr16, pVCpu->iem.s.iEffSeg);
4724 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr32, pVCpu->iem.s.iEffSeg);
4725 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr64, pVCpu->iem.s.iEffSeg);
4726 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4727 }
4728 case IEMMODE_64BIT:
4729 switch (pVCpu->iem.s.enmEffAddrMode)
4730 {
4731 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6);
4732 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr32, pVCpu->iem.s.iEffSeg);
4733 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr64, pVCpu->iem.s.iEffSeg);
4734 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4735 }
4736 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4737 }
4738 }
4739 IEMOP_MNEMONIC(movs_Xv_Yv, "movs Xv,Yv");
4740
4741 /*
4742 * Annoying double switch here.
4743 * Using ugly macro for implementing the cases, sharing it with movsb.
4744 */
4745 switch (pVCpu->iem.s.enmEffOpSize)
4746 {
4747 case IEMMODE_16BIT:
4748 switch (pVCpu->iem.s.enmEffAddrMode)
4749 {
4750 case IEMMODE_16BIT: IEM_MOVS_CASE(16, 16); break;
4751 case IEMMODE_32BIT: IEM_MOVS_CASE(16, 32); break;
4752 case IEMMODE_64BIT: IEM_MOVS_CASE(16, 64); break;
4753 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4754 }
4755 break;
4756
4757 case IEMMODE_32BIT:
4758 switch (pVCpu->iem.s.enmEffAddrMode)
4759 {
4760 case IEMMODE_16BIT: IEM_MOVS_CASE(32, 16); break;
4761 case IEMMODE_32BIT: IEM_MOVS_CASE(32, 32); break;
4762 case IEMMODE_64BIT: IEM_MOVS_CASE(32, 64); break;
4763 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4764 }
4765 break;
4766
4767 case IEMMODE_64BIT:
4768 switch (pVCpu->iem.s.enmEffAddrMode)
4769 {
4770 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
4771 case IEMMODE_32BIT: IEM_MOVS_CASE(64, 32); break;
4772 case IEMMODE_64BIT: IEM_MOVS_CASE(64, 64); break;
4773 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4774 }
4775 break;
4776 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4777 }
4778 return VINF_SUCCESS;
4779}
4780
4781#undef IEM_MOVS_CASE
4782
/** Macro used by iemOp_cmpsb_Xb_Yb and iemOp_cmpswd_Xv_Yv.
 *
 * Emits the microcode for a single (non-REP) CMPS iteration: load ValBits
 * bits from DS:rSI (segment overridable, iEffSeg) and from ES:rDI, compare
 * them via iemAImpl_cmp_uNN (updates EFLAGS only), then advance or rewind
 * both index registers by ValBits/8 according to EFLAGS.DF. */
#define IEM_CMPS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(3, 3); \
        IEM_MC_ARG(uint##ValBits##_t *, puValue1, 0); \
        IEM_MC_ARG(uint##ValBits##_t, uValue2, 1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue1); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue1, pVCpu->iem.s.iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue2, X86_SREG_ES, uAddr); \
        IEM_MC_REF_LOCAL(puValue1, uValue1); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puValue1, uValue2, pEFlags); \
        \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END(); \
4809
4810/**
4811 * @opcode 0xa6
4812 */
4813FNIEMOP_DEF(iemOp_cmpsb_Xb_Yb)
4814{
4815 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4816
4817 /*
4818 * Use the C implementation if a repeat prefix is encountered.
4819 */
4820 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
4821 {
4822 IEMOP_MNEMONIC(repz_cmps_Xb_Yb, "repz cmps Xb,Yb");
4823 switch (pVCpu->iem.s.enmEffAddrMode)
4824 {
4825 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
4826 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
4827 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
4828 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4829 }
4830 }
4831 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
4832 {
4833 IEMOP_MNEMONIC(repnz_cmps_Xb_Yb, "repnz cmps Xb,Yb");
4834 switch (pVCpu->iem.s.enmEffAddrMode)
4835 {
4836 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
4837 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
4838 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
4839 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4840 }
4841 }
4842 IEMOP_MNEMONIC(cmps_Xb_Yb, "cmps Xb,Yb");
4843
4844 /*
4845 * Sharing case implementation with cmps[wdq] below.
4846 */
4847 switch (pVCpu->iem.s.enmEffAddrMode)
4848 {
4849 case IEMMODE_16BIT: IEM_CMPS_CASE(8, 16); break;
4850 case IEMMODE_32BIT: IEM_CMPS_CASE(8, 32); break;
4851 case IEMMODE_64BIT: IEM_CMPS_CASE(8, 64); break;
4852 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4853 }
4854 return VINF_SUCCESS;
4855
4856}
4857
4858
4859/**
4860 * @opcode 0xa7
4861 */
4862FNIEMOP_DEF(iemOp_cmpswd_Xv_Yv)
4863{
4864 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4865
4866 /*
4867 * Use the C implementation if a repeat prefix is encountered.
4868 */
4869 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
4870 {
4871 IEMOP_MNEMONIC(repe_cmps_Xv_Yv, "repe cmps Xv,Yv");
4872 switch (pVCpu->iem.s.enmEffOpSize)
4873 {
4874 case IEMMODE_16BIT:
4875 switch (pVCpu->iem.s.enmEffAddrMode)
4876 {
4877 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
4878 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
4879 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
4880 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4881 }
4882 break;
4883 case IEMMODE_32BIT:
4884 switch (pVCpu->iem.s.enmEffAddrMode)
4885 {
4886 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
4887 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
4888 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
4889 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4890 }
4891 case IEMMODE_64BIT:
4892 switch (pVCpu->iem.s.enmEffAddrMode)
4893 {
4894 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_4);
4895 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
4896 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
4897 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4898 }
4899 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4900 }
4901 }
4902
4903 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
4904 {
4905 IEMOP_MNEMONIC(repne_cmps_Xv_Yv, "repne cmps Xv,Yv");
4906 switch (pVCpu->iem.s.enmEffOpSize)
4907 {
4908 case IEMMODE_16BIT:
4909 switch (pVCpu->iem.s.enmEffAddrMode)
4910 {
4911 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
4912 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
4913 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
4914 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4915 }
4916 break;
4917 case IEMMODE_32BIT:
4918 switch (pVCpu->iem.s.enmEffAddrMode)
4919 {
4920 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
4921 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
4922 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
4923 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4924 }
4925 case IEMMODE_64BIT:
4926 switch (pVCpu->iem.s.enmEffAddrMode)
4927 {
4928 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_2);
4929 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
4930 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
4931 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4932 }
4933 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4934 }
4935 }
4936
4937 IEMOP_MNEMONIC(cmps_Xv_Yv, "cmps Xv,Yv");
4938
4939 /*
4940 * Annoying double switch here.
4941 * Using ugly macro for implementing the cases, sharing it with cmpsb.
4942 */
4943 switch (pVCpu->iem.s.enmEffOpSize)
4944 {
4945 case IEMMODE_16BIT:
4946 switch (pVCpu->iem.s.enmEffAddrMode)
4947 {
4948 case IEMMODE_16BIT: IEM_CMPS_CASE(16, 16); break;
4949 case IEMMODE_32BIT: IEM_CMPS_CASE(16, 32); break;
4950 case IEMMODE_64BIT: IEM_CMPS_CASE(16, 64); break;
4951 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4952 }
4953 break;
4954
4955 case IEMMODE_32BIT:
4956 switch (pVCpu->iem.s.enmEffAddrMode)
4957 {
4958 case IEMMODE_16BIT: IEM_CMPS_CASE(32, 16); break;
4959 case IEMMODE_32BIT: IEM_CMPS_CASE(32, 32); break;
4960 case IEMMODE_64BIT: IEM_CMPS_CASE(32, 64); break;
4961 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4962 }
4963 break;
4964
4965 case IEMMODE_64BIT:
4966 switch (pVCpu->iem.s.enmEffAddrMode)
4967 {
4968 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
4969 case IEMMODE_32BIT: IEM_CMPS_CASE(64, 32); break;
4970 case IEMMODE_64BIT: IEM_CMPS_CASE(64, 64); break;
4971 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4972 }
4973 break;
4974 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4975 }
4976 return VINF_SUCCESS;
4977
4978}
4979
4980#undef IEM_CMPS_CASE
4981
4982/**
4983 * @opcode 0xa8
4984 */
4985FNIEMOP_DEF(iemOp_test_AL_Ib)
4986{
4987 IEMOP_MNEMONIC(test_al_Ib, "test al,Ib");
4988 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
4989 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_test);
4990}
4991
4992
4993/**
4994 * @opcode 0xa9
4995 */
4996FNIEMOP_DEF(iemOp_test_eAX_Iz)
4997{
4998 IEMOP_MNEMONIC(test_rAX_Iz, "test rAX,Iz");
4999 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
5000 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_test);
5001}
5002
5003
/** Macro used by iemOp_stosb_Yb_AL and iemOp_stoswd_Yv_eAX.
 *
 * Emits the microcode for a single (non-REP) STOS iteration: store the low
 * ValBits bits of rAX to ES:rDI, then advance or rewind rDI by ValBits/8
 * according to EFLAGS.DF.  The ES segment cannot be overridden. */
#define IEM_STOS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        IEM_MC_FETCH_GREG_U##ValBits(uValue, X86_GREG_xAX); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END(); \
5019
5020/**
5021 * @opcode 0xaa
5022 */
5023FNIEMOP_DEF(iemOp_stosb_Yb_AL)
5024{
5025 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5026
5027 /*
5028 * Use the C implementation if a repeat prefix is encountered.
5029 */
5030 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
5031 {
5032 IEMOP_MNEMONIC(rep_stos_Yb_al, "rep stos Yb,al");
5033 switch (pVCpu->iem.s.enmEffAddrMode)
5034 {
5035 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m16);
5036 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m32);
5037 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m64);
5038 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5039 }
5040 }
5041 IEMOP_MNEMONIC(stos_Yb_al, "stos Yb,al");
5042
5043 /*
5044 * Sharing case implementation with stos[wdq] below.
5045 */
5046 switch (pVCpu->iem.s.enmEffAddrMode)
5047 {
5048 case IEMMODE_16BIT: IEM_STOS_CASE(8, 16); break;
5049 case IEMMODE_32BIT: IEM_STOS_CASE(8, 32); break;
5050 case IEMMODE_64BIT: IEM_STOS_CASE(8, 64); break;
5051 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5052 }
5053 return VINF_SUCCESS;
5054}
5055
5056
5057/**
5058 * @opcode 0xab
5059 */
5060FNIEMOP_DEF(iemOp_stoswd_Yv_eAX)
5061{
5062 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5063
5064 /*
5065 * Use the C implementation if a repeat prefix is encountered.
5066 */
5067 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
5068 {
5069 IEMOP_MNEMONIC(rep_stos_Yv_rAX, "rep stos Yv,rAX");
5070 switch (pVCpu->iem.s.enmEffOpSize)
5071 {
5072 case IEMMODE_16BIT:
5073 switch (pVCpu->iem.s.enmEffAddrMode)
5074 {
5075 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m16);
5076 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m32);
5077 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m64);
5078 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5079 }
5080 break;
5081 case IEMMODE_32BIT:
5082 switch (pVCpu->iem.s.enmEffAddrMode)
5083 {
5084 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m16);
5085 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m32);
5086 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m64);
5087 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5088 }
5089 case IEMMODE_64BIT:
5090 switch (pVCpu->iem.s.enmEffAddrMode)
5091 {
5092 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_9);
5093 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m32);
5094 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m64);
5095 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5096 }
5097 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5098 }
5099 }
5100 IEMOP_MNEMONIC(stos_Yv_rAX, "stos Yv,rAX");
5101
5102 /*
5103 * Annoying double switch here.
5104 * Using ugly macro for implementing the cases, sharing it with stosb.
5105 */
5106 switch (pVCpu->iem.s.enmEffOpSize)
5107 {
5108 case IEMMODE_16BIT:
5109 switch (pVCpu->iem.s.enmEffAddrMode)
5110 {
5111 case IEMMODE_16BIT: IEM_STOS_CASE(16, 16); break;
5112 case IEMMODE_32BIT: IEM_STOS_CASE(16, 32); break;
5113 case IEMMODE_64BIT: IEM_STOS_CASE(16, 64); break;
5114 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5115 }
5116 break;
5117
5118 case IEMMODE_32BIT:
5119 switch (pVCpu->iem.s.enmEffAddrMode)
5120 {
5121 case IEMMODE_16BIT: IEM_STOS_CASE(32, 16); break;
5122 case IEMMODE_32BIT: IEM_STOS_CASE(32, 32); break;
5123 case IEMMODE_64BIT: IEM_STOS_CASE(32, 64); break;
5124 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5125 }
5126 break;
5127
5128 case IEMMODE_64BIT:
5129 switch (pVCpu->iem.s.enmEffAddrMode)
5130 {
5131 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
5132 case IEMMODE_32BIT: IEM_STOS_CASE(64, 32); break;
5133 case IEMMODE_64BIT: IEM_STOS_CASE(64, 64); break;
5134 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5135 }
5136 break;
5137 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5138 }
5139 return VINF_SUCCESS;
5140}
5141
5142#undef IEM_STOS_CASE
5143
/** Macro used by iemOp_lodsb_AL_Xb and iemOp_lodswd_eAX_Xv.
 *
 * Emits the microcode for a single (non-REP) LODS iteration: load ValBits
 * bits from DS:rSI (segment overridable, iEffSeg) into rAX, then advance or
 * rewind rSI by ValBits/8 according to EFLAGS.DF. */
#define IEM_LODS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
        IEM_MC_STORE_GREG_U##ValBits(X86_GREG_xAX, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END();
5159
5160/**
5161 * @opcode 0xac
5162 */
5163FNIEMOP_DEF(iemOp_lodsb_AL_Xb)
5164{
5165 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5166
5167 /*
5168 * Use the C implementation if a repeat prefix is encountered.
5169 */
5170 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
5171 {
5172 IEMOP_MNEMONIC(rep_lodsb_AL_Xb, "rep lodsb AL,Xb");
5173 switch (pVCpu->iem.s.enmEffAddrMode)
5174 {
5175 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m16, pVCpu->iem.s.iEffSeg);
5176 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m32, pVCpu->iem.s.iEffSeg);
5177 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m64, pVCpu->iem.s.iEffSeg);
5178 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5179 }
5180 }
5181 IEMOP_MNEMONIC(lodsb_AL_Xb, "lodsb AL,Xb");
5182
5183 /*
5184 * Sharing case implementation with stos[wdq] below.
5185 */
5186 switch (pVCpu->iem.s.enmEffAddrMode)
5187 {
5188 case IEMMODE_16BIT: IEM_LODS_CASE(8, 16); break;
5189 case IEMMODE_32BIT: IEM_LODS_CASE(8, 32); break;
5190 case IEMMODE_64BIT: IEM_LODS_CASE(8, 64); break;
5191 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5192 }
5193 return VINF_SUCCESS;
5194}
5195
5196
5197/**
5198 * @opcode 0xad
5199 */
5200FNIEMOP_DEF(iemOp_lodswd_eAX_Xv)
5201{
5202 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5203
5204 /*
5205 * Use the C implementation if a repeat prefix is encountered.
5206 */
5207 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
5208 {
5209 IEMOP_MNEMONIC(rep_lods_rAX_Xv, "rep lods rAX,Xv");
5210 switch (pVCpu->iem.s.enmEffOpSize)
5211 {
5212 case IEMMODE_16BIT:
5213 switch (pVCpu->iem.s.enmEffAddrMode)
5214 {
5215 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m16, pVCpu->iem.s.iEffSeg);
5216 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m32, pVCpu->iem.s.iEffSeg);
5217 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m64, pVCpu->iem.s.iEffSeg);
5218 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5219 }
5220 break;
5221 case IEMMODE_32BIT:
5222 switch (pVCpu->iem.s.enmEffAddrMode)
5223 {
5224 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m16, pVCpu->iem.s.iEffSeg);
5225 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m32, pVCpu->iem.s.iEffSeg);
5226 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m64, pVCpu->iem.s.iEffSeg);
5227 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5228 }
5229 case IEMMODE_64BIT:
5230 switch (pVCpu->iem.s.enmEffAddrMode)
5231 {
5232 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_7);
5233 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m32, pVCpu->iem.s.iEffSeg);
5234 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m64, pVCpu->iem.s.iEffSeg);
5235 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5236 }
5237 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5238 }
5239 }
5240 IEMOP_MNEMONIC(lods_rAX_Xv, "lods rAX,Xv");
5241
5242 /*
5243 * Annoying double switch here.
5244 * Using ugly macro for implementing the cases, sharing it with lodsb.
5245 */
5246 switch (pVCpu->iem.s.enmEffOpSize)
5247 {
5248 case IEMMODE_16BIT:
5249 switch (pVCpu->iem.s.enmEffAddrMode)
5250 {
5251 case IEMMODE_16BIT: IEM_LODS_CASE(16, 16); break;
5252 case IEMMODE_32BIT: IEM_LODS_CASE(16, 32); break;
5253 case IEMMODE_64BIT: IEM_LODS_CASE(16, 64); break;
5254 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5255 }
5256 break;
5257
5258 case IEMMODE_32BIT:
5259 switch (pVCpu->iem.s.enmEffAddrMode)
5260 {
5261 case IEMMODE_16BIT: IEM_LODS_CASE(32, 16); break;
5262 case IEMMODE_32BIT: IEM_LODS_CASE(32, 32); break;
5263 case IEMMODE_64BIT: IEM_LODS_CASE(32, 64); break;
5264 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5265 }
5266 break;
5267
5268 case IEMMODE_64BIT:
5269 switch (pVCpu->iem.s.enmEffAddrMode)
5270 {
5271 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
5272 case IEMMODE_32BIT: IEM_LODS_CASE(64, 32); break;
5273 case IEMMODE_64BIT: IEM_LODS_CASE(64, 64); break;
5274 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5275 }
5276 break;
5277 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5278 }
5279 return VINF_SUCCESS;
5280}
5281
5282#undef IEM_LODS_CASE
5283
/** Macro used by iemOp_scasb_AL_Xb and iemOp_scaswd_eAX_Xv.
 *
 * Emits the microcode for a single (non-REP) SCAS iteration: load ValBits
 * bits from ES:rDI, compare against rAX via iemAImpl_cmp_uNN (updates EFLAGS
 * only), then advance or rewind rDI by ValBits/8 according to EFLAGS.DF.
 * The ES segment cannot be overridden. */
#define IEM_SCAS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(3, 2); \
        IEM_MC_ARG(uint##ValBits##_t *, puRax, 0); \
        IEM_MC_ARG(uint##ValBits##_t, uValue, 1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, X86_SREG_ES, uAddr); \
        IEM_MC_REF_GREG_U##ValBits(puRax, X86_GREG_xAX); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puRax, uValue, pEFlags); \
        \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END();
5305
5306/**
5307 * @opcode 0xae
5308 */
5309FNIEMOP_DEF(iemOp_scasb_AL_Xb)
5310{
5311 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5312
5313 /*
5314 * Use the C implementation if a repeat prefix is encountered.
5315 */
5316 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
5317 {
5318 IEMOP_MNEMONIC(repe_scasb_AL_Xb, "repe scasb AL,Xb");
5319 switch (pVCpu->iem.s.enmEffAddrMode)
5320 {
5321 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m16);
5322 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m32);
5323 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m64);
5324 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5325 }
5326 }
5327 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
5328 {
5329 IEMOP_MNEMONIC(repone_scasb_AL_Xb, "repne scasb AL,Xb");
5330 switch (pVCpu->iem.s.enmEffAddrMode)
5331 {
5332 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m16);
5333 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m32);
5334 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m64);
5335 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5336 }
5337 }
5338 IEMOP_MNEMONIC(scasb_AL_Xb, "scasb AL,Xb");
5339
5340 /*
5341 * Sharing case implementation with stos[wdq] below.
5342 */
5343 switch (pVCpu->iem.s.enmEffAddrMode)
5344 {
5345 case IEMMODE_16BIT: IEM_SCAS_CASE(8, 16); break;
5346 case IEMMODE_32BIT: IEM_SCAS_CASE(8, 32); break;
5347 case IEMMODE_64BIT: IEM_SCAS_CASE(8, 64); break;
5348 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5349 }
5350 return VINF_SUCCESS;
5351}
5352
5353
5354/**
5355 * @opcode 0xaf
5356 */
5357FNIEMOP_DEF(iemOp_scaswd_eAX_Xv)
5358{
5359 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5360
5361 /*
5362 * Use the C implementation if a repeat prefix is encountered.
5363 */
5364 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
5365 {
5366 IEMOP_MNEMONIC(repe_scas_rAX_Xv, "repe scas rAX,Xv");
5367 switch (pVCpu->iem.s.enmEffOpSize)
5368 {
5369 case IEMMODE_16BIT:
5370 switch (pVCpu->iem.s.enmEffAddrMode)
5371 {
5372 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m16);
5373 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m32);
5374 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m64);
5375 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5376 }
5377 break;
5378 case IEMMODE_32BIT:
5379 switch (pVCpu->iem.s.enmEffAddrMode)
5380 {
5381 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m16);
5382 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m32);
5383 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m64);
5384 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5385 }
5386 case IEMMODE_64BIT:
5387 switch (pVCpu->iem.s.enmEffAddrMode)
5388 {
5389 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /** @todo It's this wrong, we can do 16-bit addressing in 64-bit mode, but not 32-bit. right? */
5390 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m32);
5391 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m64);
5392 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5393 }
5394 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5395 }
5396 }
5397 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
5398 {
5399 IEMOP_MNEMONIC(repne_scas_rAX_Xv, "repne scas rAX,Xv");
5400 switch (pVCpu->iem.s.enmEffOpSize)
5401 {
5402 case IEMMODE_16BIT:
5403 switch (pVCpu->iem.s.enmEffAddrMode)
5404 {
5405 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m16);
5406 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m32);
5407 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m64);
5408 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5409 }
5410 break;
5411 case IEMMODE_32BIT:
5412 switch (pVCpu->iem.s.enmEffAddrMode)
5413 {
5414 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m16);
5415 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m32);
5416 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m64);
5417 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5418 }
5419 case IEMMODE_64BIT:
5420 switch (pVCpu->iem.s.enmEffAddrMode)
5421 {
5422 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_5);
5423 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m32);
5424 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m64);
5425 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5426 }
5427 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5428 }
5429 }
5430 IEMOP_MNEMONIC(scas_rAX_Xv, "scas rAX,Xv");
5431
5432 /*
5433 * Annoying double switch here.
5434 * Using ugly macro for implementing the cases, sharing it with scasb.
5435 */
5436 switch (pVCpu->iem.s.enmEffOpSize)
5437 {
5438 case IEMMODE_16BIT:
5439 switch (pVCpu->iem.s.enmEffAddrMode)
5440 {
5441 case IEMMODE_16BIT: IEM_SCAS_CASE(16, 16); break;
5442 case IEMMODE_32BIT: IEM_SCAS_CASE(16, 32); break;
5443 case IEMMODE_64BIT: IEM_SCAS_CASE(16, 64); break;
5444 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5445 }
5446 break;
5447
5448 case IEMMODE_32BIT:
5449 switch (pVCpu->iem.s.enmEffAddrMode)
5450 {
5451 case IEMMODE_16BIT: IEM_SCAS_CASE(32, 16); break;
5452 case IEMMODE_32BIT: IEM_SCAS_CASE(32, 32); break;
5453 case IEMMODE_64BIT: IEM_SCAS_CASE(32, 64); break;
5454 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5455 }
5456 break;
5457
5458 case IEMMODE_64BIT:
5459 switch (pVCpu->iem.s.enmEffAddrMode)
5460 {
5461 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
5462 case IEMMODE_32BIT: IEM_SCAS_CASE(64, 32); break;
5463 case IEMMODE_64BIT: IEM_SCAS_CASE(64, 64); break;
5464 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5465 }
5466 break;
5467 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5468 }
5469 return VINF_SUCCESS;
5470}
5471
5472#undef IEM_SCAS_CASE
5473
5474/**
5475 * Common 'mov r8, imm8' helper.
5476 */
5477FNIEMOP_DEF_1(iemOpCommonMov_r8_Ib, uint8_t, iReg)
5478{
5479 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
5480 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5481
5482 IEM_MC_BEGIN(0, 1);
5483 IEM_MC_LOCAL_CONST(uint8_t, u8Value,/*=*/ u8Imm);
5484 IEM_MC_STORE_GREG_U8(iReg, u8Value);
5485 IEM_MC_ADVANCE_RIP();
5486 IEM_MC_END();
5487
5488 return VINF_SUCCESS;
5489}
5490
5491
/**
 * @opcode 0xb0
 * 'mov AL,Ib' - byte register 0 (AL; R8B when REX.B is set).
 */
FNIEMOP_DEF(iemOp_mov_AL_Ib)
{
    IEMOP_MNEMONIC(mov_AL_Ib, "mov AL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xAX | pVCpu->iem.s.uRexB);
}
5500
5501
/**
 * @opcode 0xb1
 * 'mov CL,Ib' - byte register 1 (CL; R9B when REX.B is set).
 */
FNIEMOP_DEF(iemOp_CL_Ib)
{
    IEMOP_MNEMONIC(mov_CL_Ib, "mov CL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xCX | pVCpu->iem.s.uRexB);
}
5510
5511
/**
 * @opcode 0xb2
 * 'mov DL,Ib' - byte register 2 (DL; R10B when REX.B is set).
 */
FNIEMOP_DEF(iemOp_DL_Ib)
{
    IEMOP_MNEMONIC(mov_DL_Ib, "mov DL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDX | pVCpu->iem.s.uRexB);
}
5520
5521
/**
 * @opcode 0xb3
 * 'mov BL,Ib' - byte register 3 (BL; R11B when REX.B is set).
 */
FNIEMOP_DEF(iemOp_BL_Ib)
{
    IEMOP_MNEMONIC(mov_BL_Ib, "mov BL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP | pVCpu->iem.s.uRexB);
}
5530
5531
/**
 * @opcode 0xb4
 * 'mov AH,Ib' - byte register index 4 (hence X86_GREG_xSP): AH without any
 * REX prefix, SPL (or R12B) with one.  NOTE(review): the AH-vs-SPL choice is
 * presumably resolved inside the byte-register store accessor - confirm.
 */
FNIEMOP_DEF(iemOp_mov_AH_Ib)
{
    IEMOP_MNEMONIC(mov_AH_Ib, "mov AH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP | pVCpu->iem.s.uRexB);
}
5540
5541
/**
 * @opcode 0xb5
 * 'mov CH,Ib' - byte register index 5 (hence X86_GREG_xBP): CH without any
 * REX prefix, BPL (or R13B) with one.
 */
FNIEMOP_DEF(iemOp_CH_Ib)
{
    IEMOP_MNEMONIC(mov_CH_Ib, "mov CH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBP | pVCpu->iem.s.uRexB);
}
5550
5551
/**
 * @opcode 0xb6
 * 'mov DH,Ib' - byte register index 6 (hence X86_GREG_xSI): DH without any
 * REX prefix, SIL (or R14B) with one.
 */
FNIEMOP_DEF(iemOp_DH_Ib)
{
    IEMOP_MNEMONIC(mov_DH_Ib, "mov DH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSI | pVCpu->iem.s.uRexB);
}
5560
5561
/**
 * @opcode 0xb7
 * 'mov BH,Ib' - byte register index 7 (hence X86_GREG_xDI): BH without any
 * REX prefix, DIL (or R15B) with one.
 */
FNIEMOP_DEF(iemOp_BH_Ib)
{
    IEMOP_MNEMONIC(mov_BH_Ib, "mov BH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDI | pVCpu->iem.s.uRexB);
}
5570
5571
5572/**
5573 * Common 'mov regX,immX' helper.
5574 */
5575FNIEMOP_DEF_1(iemOpCommonMov_Rv_Iv, uint8_t, iReg)
5576{
5577 switch (pVCpu->iem.s.enmEffOpSize)
5578 {
5579 case IEMMODE_16BIT:
5580 {
5581 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
5582 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5583
5584 IEM_MC_BEGIN(0, 1);
5585 IEM_MC_LOCAL_CONST(uint16_t, u16Value,/*=*/ u16Imm);
5586 IEM_MC_STORE_GREG_U16(iReg, u16Value);
5587 IEM_MC_ADVANCE_RIP();
5588 IEM_MC_END();
5589 break;
5590 }
5591
5592 case IEMMODE_32BIT:
5593 {
5594 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
5595 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5596
5597 IEM_MC_BEGIN(0, 1);
5598 IEM_MC_LOCAL_CONST(uint32_t, u32Value,/*=*/ u32Imm);
5599 IEM_MC_STORE_GREG_U32(iReg, u32Value);
5600 IEM_MC_ADVANCE_RIP();
5601 IEM_MC_END();
5602 break;
5603 }
5604 case IEMMODE_64BIT:
5605 {
5606 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm); /* 64-bit immediate! */
5607 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5608
5609 IEM_MC_BEGIN(0, 1);
5610 IEM_MC_LOCAL_CONST(uint64_t, u64Value,/*=*/ u64Imm);
5611 IEM_MC_STORE_GREG_U64(iReg, u64Value);
5612 IEM_MC_ADVANCE_RIP();
5613 IEM_MC_END();
5614 break;
5615 }
5616 }
5617
5618 return VINF_SUCCESS;
5619}
5620
5621
/**
 * @opcode 0xb8
 * 'mov rAX,Iv' - register 0 (rAX; r8 when REX.B is set).
 */
FNIEMOP_DEF(iemOp_eAX_Iv)
{
    IEMOP_MNEMONIC(mov_rAX_IV, "mov rAX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xAX | pVCpu->iem.s.uRexB);
}
5630
5631
/**
 * @opcode 0xb9
 * 'mov rCX,Iv' - register 1 (rCX; r9 when REX.B is set).
 */
FNIEMOP_DEF(iemOp_eCX_Iv)
{
    IEMOP_MNEMONIC(mov_rCX_IV, "mov rCX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xCX | pVCpu->iem.s.uRexB);
}
5640
5641
/**
 * @opcode 0xba
 * 'mov rDX,Iv' - register 2 (rDX; r10 when REX.B is set).
 */
FNIEMOP_DEF(iemOp_eDX_Iv)
{
    IEMOP_MNEMONIC(mov_rDX_IV, "mov rDX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDX | pVCpu->iem.s.uRexB);
}
5650
5651
/**
 * @opcode 0xbb
 * 'mov rBX,Iv' - register 3 (rBX; r11 when REX.B is set).
 */
FNIEMOP_DEF(iemOp_eBX_Iv)
{
    IEMOP_MNEMONIC(mov_rBX_IV, "mov rBX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBX | pVCpu->iem.s.uRexB);
}
5660
5661
/**
 * @opcode 0xbc
 * 'mov rSP,Iv' - register 4 (rSP; r12 when REX.B is set).
 */
FNIEMOP_DEF(iemOp_eSP_Iv)
{
    IEMOP_MNEMONIC(mov_rSP_IV, "mov rSP,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSP | pVCpu->iem.s.uRexB);
}
5670
5671
/**
 * @opcode 0xbd
 * 'mov rBP,Iv' - register 5 (rBP; r13 when REX.B is set).
 */
FNIEMOP_DEF(iemOp_eBP_Iv)
{
    IEMOP_MNEMONIC(mov_rBP_IV, "mov rBP,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBP | pVCpu->iem.s.uRexB);
}
5680
5681
/**
 * @opcode 0xbe
 * 'mov rSI,Iv' - register 6 (rSI; r14 when REX.B is set).
 */
FNIEMOP_DEF(iemOp_eSI_Iv)
{
    IEMOP_MNEMONIC(mov_rSI_IV, "mov rSI,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSI | pVCpu->iem.s.uRexB);
}
5690
5691
/**
 * @opcode 0xbf
 * 'mov rDI,Iv' - register 7 (rDI; r15 when REX.B is set).
 */
FNIEMOP_DEF(iemOp_eDI_Iv)
{
    IEMOP_MNEMONIC(mov_rDI_IV, "mov rDI,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDI | pVCpu->iem.s.uRexB);
}
5700
5701
/**
 * @opcode 0xc0
 * Group 2 byte shifts/rotates with an immediate count: rol/ror/rcl/rcr/
 * shl/shr/sar Eb,Ib.  The ModR/M reg field selects the operation; /6 is
 * undefined and raises \#UD.
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_Ib)
{
    IEMOP_HLP_MIN_186(); /* Ib-count forms were introduced with the 80186. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Eb_Ib, "rol Eb,Ib"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Eb_Ib, "ror Eb,Ib"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Eb_Ib, "rcl Eb,Ib"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Eb_Ib, "rcr Eb,Ib"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Eb_Ib, "shl Eb,Ib"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Eb_Ib, "shr Eb,Ib"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Eb_Ib, "sar Eb,Ib"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    /* OF and AF are left undefined by these instructions for some counts. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,       pu8Dst,            0);
        IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1);
        IEM_MC_ARG(uint32_t *,      pEFlags,           2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,   pu8Dst,          0);
        IEM_MC_ARG(uint8_t,     cShiftArg,       1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        /* The trailing 1 = one immediate byte follows the ModR/M bytes
           (cf. opcode 0xd0 which passes 0). */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEM_MC_ASSIGN(cShiftArg, cShift);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5763
5764
/**
 * @opcode 0xc1
 * Group 2 word/dword/qword shifts/rotates with an immediate count:
 * rol/ror/rcl/rcr/shl/shr/sar Ev,Ib.  The ModR/M reg field selects the
 * operation; /6 is undefined and raises \#UD.
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_Ib)
{
    IEMOP_HLP_MIN_186(); /* Ib-count forms were introduced with the 80186. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Ev_Ib, "rol Ev,Ib"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Ev_Ib, "ror Ev,Ib"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Ev_Ib, "rcl Ev,Ib"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Ev_Ib, "rcr Ev,Ib"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Ev_Ib, "shl Ev,Ib"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Ev_Ib, "shr Ev,Ib"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Ev_Ib, "sar Ev,Ib"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    /* OF and AF are left undefined by these instructions for some counts. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register operand: immediate follows ModR/M directly. */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit writes to a GPR clear the upper dword in 64-bit mode. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory operand: the effective address must be calculated before the
           immediate byte can be fetched (note the trailing 1 = one immediate
           byte following, needed for RIP-relative addressing). */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,  pu16Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,  pu32Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,  pu64Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
5906
5907
/**
 * @opcode 0xc2
 * Near return, popping Iw extra bytes off the stack after the return address.
 */
FNIEMOP_DEF(iemOp_retn_Iw)
{
    IEMOP_MNEMONIC(retn_Iw, "retn Iw");
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* near ret defaults to 64-bit operand size in long mode */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pVCpu->iem.s.enmEffOpSize, u16Imm);
}
5919
5920
/**
 * @opcode 0xc3
 * Near return (no extra stack bytes popped; shares iemCImpl_retn with 0xc2,
 * passing 0 as the pop count).
 */
FNIEMOP_DEF(iemOp_retn)
{
    IEMOP_MNEMONIC(retn, "retn");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* near ret defaults to 64-bit operand size in long mode */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pVCpu->iem.s.enmEffOpSize, 0);
}
5931
5932
5933/**
5934 * @opcode 0xc4
5935 */
5936FNIEMOP_DEF(iemOp_les_Gv_Mp__vex2)
5937{
5938 /* The LES instruction is invalid 64-bit mode. In legacy and
5939 compatability mode it is invalid with MOD=3.
5940 The use as a VEX prefix is made possible by assigning the inverted
5941 REX.R to the top MOD bit, and the top bit in the inverted register
5942 specifier to the bottom MOD bit, thereby effectively limiting 32-bit
5943 to accessing registers 0..7 in this VEX form. */
5944 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5945 if ( pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
5946 || (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5947 {
5948 IEMOP_MNEMONIC(vex2_prefix, "vex2");
5949 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx)
5950 {
5951 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
5952 if ( ( pVCpu->iem.s.fPrefixes
5953 & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_LOCK | IEM_OP_PRF_REX))
5954 == 0)
5955 {
5956 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
5957 pVCpu->iem.s.uRexReg = ~bRm >> (7 - 3);
5958 pVCpu->iem.s.uVex3rdReg = (~bRm >> 3) & 0xf;
5959 pVCpu->iem.s.uVexLength = (bRm >> 2) & 1;
5960 pVCpu->iem.s.idxPrefix = bRm & 0x3;
5961
5962 return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
5963 }
5964
5965 Log(("VEX2: Invalid prefix mix!\n"));
5966 }
5967 else
5968 Log(("VEX2: AVX support disabled!\n"));
5969
5970 /* @todo does intel completely decode the sequence with SIB/disp before \#UD? */
5971 return IEMOP_RAISE_INVALID_OPCODE();
5972 }
5973 IEMOP_MNEMONIC(les_Gv_Mp, "les Gv,Mp");
5974 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_ES, bRm);
5975}
5976
5977
5978/**
5979 * @opcode 0xc5
5980 */
5981FNIEMOP_DEF(iemOp_lds_Gv_Mp__vex3)
5982{
5983 /* The LDS instruction is invalid 64-bit mode. In legacy and
5984 compatability mode it is invalid with MOD=3.
5985 The use as a VEX prefix is made possible by assigning the inverted
5986 REX.R and REX.X to the two MOD bits, since the REX bits are ignored
5987 outside of 64-bit mode. VEX is not available in real or v86 mode. */
5988 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5989 if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
5990 {
5991 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
5992 {
5993 IEMOP_MNEMONIC(lds_Gv_Mp, "lds Gv,Mp");
5994 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_DS, bRm);
5995 }
5996 IEMOP_HLP_NO_REAL_OR_V86_MODE();
5997 }
5998
5999 IEMOP_MNEMONIC(vex3_prefix, "vex3");
6000 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx)
6001 {
6002 /** @todo Test when exctly the VEX conformance checks kick in during
6003 * instruction decoding and fetching (using \#PF). */
6004 uint8_t bVex2; IEM_OPCODE_GET_NEXT_U8(&bVex2);
6005 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
6006 if ( ( pVCpu->iem.s.fPrefixes
6007 & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_LOCK | IEM_OP_PRF_REX))
6008 == 0)
6009 {
6010 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
6011 if (bVex2 & 0x80 /* VEX.W */)
6012 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
6013 pVCpu->iem.s.uRexReg = ~bRm >> (7 - 3);
6014 pVCpu->iem.s.uRexIndex = ~bRm >> (6 - 3);
6015 pVCpu->iem.s.uRexB = ~bRm >> (5 - 3);
6016 pVCpu->iem.s.uVex3rdReg = (~bVex2 >> 3) & 0xf;
6017 pVCpu->iem.s.uVexLength = (bVex2 >> 2) & 1;
6018 pVCpu->iem.s.idxPrefix = bVex2 & 0x3;
6019
6020 switch (bRm & 0x1f)
6021 {
6022 case 1: /* 0x0f lead opcode byte. */
6023 return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
6024
6025 case 2: /* 0x0f 0x38 lead opcode bytes. */
6026 /** @todo VEX: Just use new tables and decoders. */
6027 IEMOP_BITCH_ABOUT_STUB();
6028 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
6029
6030 case 3: /* 0x0f 0x3a lead opcode bytes. */
6031 /** @todo VEX: Just use new tables and decoders. */
6032 IEMOP_BITCH_ABOUT_STUB();
6033 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
6034
6035 default:
6036 Log(("VEX3: Invalid vvvv value: %#x!\n", bRm & 0x1f));
6037 return IEMOP_RAISE_INVALID_OPCODE();
6038 }
6039 }
6040 else
6041 Log(("VEX3: Invalid prefix mix!\n"));
6042 }
6043 else
6044 Log(("VEX3: AVX support disabled!\n"));
6045 return IEMOP_RAISE_INVALID_OPCODE();
6046}
6047
6048
/**
 * @opcode 0xc6
 * Group 11: mov Eb,Ib (the only defined encoding, /0; all other reg values
 * raise \#UD).
 */
FNIEMOP_DEF(iemOp_Grp11_Eb_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
        return IEMOP_RAISE_INVALID_OPCODE();
    IEMOP_MNEMONIC(mov_Eb_Ib, "mov Eb,Ib");

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u8Imm);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access; the effective address must be calculated before the
           immediate (trailing 1 = one immediate byte follows the ModR/M bytes). */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Imm);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6083
6084
/**
 * @opcode 0xc7
 * Group 11: mov Ev,Iz (the only defined encoding, /0; all other reg values
 * raise \#UD).  For 64-bit operand size the immediate is a sign-extended
 * 32-bit value.
 */
FNIEMOP_DEF(iemOp_Grp11_Ev_Iz)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Ev,Iz in this group. */
        return IEMOP_RAISE_INVALID_OPCODE();
    IEMOP_MNEMONIC(mov_Ev_Iz, "mov Ev,Iz");

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 0);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 0);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 0);
                /* imm32 sign-extended to 64 bits. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access; effective address before immediate (trailing 2/4 =
           number of immediate bytes following, for RIP-relative addressing). */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                /* imm32 sign-extended to 64 bits. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6172
6173
6174
6175
/**
 * @opcode 0xc8
 * ENTER Iw,Ib - create a stack frame of Iw bytes with nesting level Ib.
 */
FNIEMOP_DEF(iemOp_enter_Iw_Ib)
{
    IEMOP_MNEMONIC(enter_Iw_Ib, "enter Iw,Ib");
    IEMOP_HLP_MIN_186(); /* ENTER was introduced with the 80186. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    uint16_t cbFrame;        IEM_OPCODE_GET_NEXT_U16(&cbFrame);
    uint8_t  u8NestingLevel; IEM_OPCODE_GET_NEXT_U8(&u8NestingLevel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_enter, pVCpu->iem.s.enmEffOpSize, cbFrame, u8NestingLevel);
}
6189
6190
/**
 * @opcode 0xc9
 * LEAVE - release the stack frame set up by ENTER (rSP := rBP; pop rBP).
 */
FNIEMOP_DEF(iemOp_leave)
{
    IEMOP_MNEMONIC(leave, "leave");
    IEMOP_HLP_MIN_186(); /* LEAVE was introduced with the 80186. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_leave, pVCpu->iem.s.enmEffOpSize);
}
6202
6203
/**
 * @opcode 0xca
 * Far return, popping Iw extra bytes off the stack after CS:rIP.
 */
FNIEMOP_DEF(iemOp_retf_Iw)
{
    IEMOP_MNEMONIC(retf_Iw, "retf Iw");
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, u16Imm);
}
6215
6216
/**
 * @opcode 0xcb
 * Far return (no extra stack bytes popped; shares iemCImpl_retf with 0xca,
 * passing 0 as the pop count).
 */
FNIEMOP_DEF(iemOp_retf)
{
    IEMOP_MNEMONIC(retf, "retf");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, 0);
}
6227
6228
/**
 * @opcode 0xcc
 * INT3 - breakpoint; raises \#BP, flagged as the dedicated breakpoint
 * instruction (fIsBpInstr=true) for the CIMPL worker.
 */
FNIEMOP_DEF(iemOp_int3)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_BP, true /*fIsBpInstr*/);
}
6237
6238
/**
 * @opcode 0xcd
 * INT Ib - software interrupt through vector Ib (not treated as the
 * breakpoint instruction even when Ib == 3).
 */
FNIEMOP_DEF(iemOp_int_Ib)
{
    uint8_t u8Int; IEM_OPCODE_GET_NEXT_U8(&u8Int);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, u8Int, false /*fIsBpInstr*/);
}
6248
6249
/**
 * @opcode 0xce
 * INTO - raise \#OF if EFLAGS.OF is set (the condition is evaluated inside
 * iemCImpl_int).  Invalid in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_into)
{
    IEMOP_MNEMONIC(into, "into");
    IEMOP_HLP_NO_64BIT(); /* INTO is not encodable in 64-bit mode. */

    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG_CONST(uint8_t,   u8Int,      /*=*/ X86_XCPT_OF, 0);
    IEM_MC_ARG_CONST(bool,      fIsBpInstr, /*=*/ false, 1);
    IEM_MC_CALL_CIMPL_2(iemCImpl_int, u8Int, fIsBpInstr);
    IEM_MC_END();
    return VINF_SUCCESS;
}
6265
6266
/**
 * @opcode 0xcf
 * IRET/IRETD/IRETQ - interrupt return, per effective operand size.
 */
FNIEMOP_DEF(iemOp_iret)
{
    IEMOP_MNEMONIC(iret, "iret");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_iret, pVCpu->iem.s.enmEffOpSize);
}
6276
6277
/**
 * @opcode 0xd0
 * Group 2 byte shifts/rotates by a constant count of 1:
 * rol/ror/rcl/rcr/shl/shr/sar Eb,1.  The ModR/M reg field selects the
 * operation; /6 is undefined and raises \#UD.
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Eb_1, "rol Eb,1"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Eb_1, "ror Eb,1"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Eb_1, "rcl Eb,1"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Eb_1, "rcr Eb,1"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Eb_1, "shl Eb,1"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Eb_1, "shr Eb,1"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Eb_1, "sar Eb,1"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
    /* OF and AF are left undefined by these instructions for some counts. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
        IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=*/1,   1);
        IEM_MC_ARG(uint32_t *,      pEFlags,            2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory; no immediate follows, hence the 0 for CALC_RM_EFF_ADDR. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
        IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=*/1,   1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,        2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6335
6336
6337
/**
 * @opcode 0xd1
 *
 * Group 2 with an implicit shift/rotate count of 1, operating on Ev
 * (16/32/64-bit register or memory operand).  The ModR/M reg field selects
 * the operation: /0 rol, /1 ror, /2 rcl, /3 rcr, /4 shl, /5 shr, /7 sar;
 * /6 is an invalid encoding and raises \#UD.
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    /* Select the implementation table (per operand size) from ModR/M.reg. */
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Ev_1, "rol Ev,1"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Ev_1, "ror Ev,1"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Ev_1, "rcl Ev,1"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Ev_1, "rcr Ev,1"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Ev_1, "shl Ev,1"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Ev_1, "shr Ev,1"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Ev_1, "sar Ev,1"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
    /* OF and AF are architecturally undefined for (some of) these ops. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit GPR writes zero the upper half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6471
6472
/**
 * @opcode 0xd2
 *
 * Group 2 with the shift/rotate count taken from CL, operating on Eb
 * (8-bit register or memory operand).  ModR/M.reg selects the operation;
 * /6 is an invalid encoding and raises \#UD.
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Eb_CL, "rol Eb,CL"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Eb_CL, "ror Eb,CL"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Eb_CL, "rcl Eb,CL"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Eb_CL, "rcr Eb,CL"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Eb_CL, "shl Eb,CL"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Eb_CL, "shr Eb,CL"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Eb_CL, "sar Eb,CL"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc, grr. */
    }
    /* OF and AF are architecturally undefined for (some of) these ops. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, cShiftArg, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* count comes from CL */
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, cShiftArg, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* count comes from CL */
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6532
6533
/**
 * @opcode 0xd3
 *
 * Group 2 with the shift/rotate count taken from CL, operating on Ev
 * (16/32/64-bit register or memory operand).  ModR/M.reg selects the
 * operation; /6 is an invalid encoding and raises \#UD.
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Ev_CL, "rol Ev,CL"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Ev_CL, "ror Ev,CL"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Ev_CL, "rcl Ev,CL"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Ev_CL, "rcr Ev,CL"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Ev_CL, "shl Ev,CL"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Ev_CL, "shr Ev,CL"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Ev_CL, "sar Ev,CL"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    /* OF and AF are architecturally undefined for (some of) these ops. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* count comes from CL */
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* count comes from CL */
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit GPR writes zero the upper half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* count comes from CL */
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* count comes from CL */
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* count comes from CL */
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* count comes from CL */
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6673
/**
 * @opcode 0xd4
 *
 * AAM - ASCII adjust AX after multiply.  Takes the divisor as an immediate
 * byte (0x0a in the classic encoding); a zero immediate raises \#DE.
 * Invalid in 64-bit mode (\#UD).
 */
FNIEMOP_DEF(iemOp_aam_Ib)
{
    IEMOP_MNEMONIC(aam_Ib, "aam Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    if (!bImm)
        return IEMOP_RAISE_DIVIDE_ERROR(); /* AAM divides AL by the immediate. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aam, bImm);
}
6687
6688
/**
 * @opcode 0xd5
 *
 * AAD - ASCII adjust AX before division.  Takes the multiplier as an
 * immediate byte (0x0a in the classic encoding).  Unlike AAM, a zero
 * immediate is fine (multiplication, not division).  Invalid in 64-bit
 * mode (\#UD).
 */
FNIEMOP_DEF(iemOp_aad_Ib)
{
    IEMOP_MNEMONIC(aad_Ib, "aad Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aad, bImm);
}
6700
6701
6702/**
6703 * @opcode 0xd6
6704 */
6705FNIEMOP_DEF(iemOp_salc)
6706{
6707 IEMOP_MNEMONIC(salc, "salc");
6708 IEMOP_HLP_MIN_286(); /* (undocument at the time) */
6709 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6710 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6711 IEMOP_HLP_NO_64BIT();
6712
6713 IEM_MC_BEGIN(0, 0);
6714 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
6715 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0xff);
6716 } IEM_MC_ELSE() {
6717 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0x00);
6718 } IEM_MC_ENDIF();
6719 IEM_MC_ADVANCE_RIP();
6720 IEM_MC_END();
6721 return VINF_SUCCESS;
6722}
6723
6724
/**
 * @opcode 0xd7
 *
 * XLAT - table look-up translation: AL = [rBX + zero-extended AL], using
 * the effective data segment.  The address width follows the effective
 * address mode (16/32/64-bit).
 */
FNIEMOP_DEF(iemOp_xlat)
{
    IEMOP_MNEMONIC(xlat, "xlat");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t, u8Tmp);
            IEM_MC_LOCAL(uint16_t, u16Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U16(u16Addr, X86_GREG_xAX); /* index = AL, zero extended */
            IEM_MC_ADD_GREG_U16_TO_LOCAL(u16Addr, X86_GREG_xBX); /* base = BX */
            IEM_MC_FETCH_MEM16_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u16Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t, u8Tmp);
            IEM_MC_LOCAL(uint32_t, u32Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U32(u32Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U32_TO_LOCAL(u32Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM32_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u32Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t, u8Tmp);
            IEM_MC_LOCAL(uint64_t, u64Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U64(u64Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U64_TO_LOCAL(u64Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u64Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6773
6774
/**
 * Common worker for FPU instructions working on ST0 and STn, and storing the
 * result in ST0.
 *
 * @param   bRm         The ModR/M byte; the STn index is taken from the r/m
 *                      field.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST0 and STn must hold a value; otherwise signal stack underflow
       on ST0 (the destination). */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
6805
6806
/**
 * Common worker for FPU instructions working on ST0 and STn, and only affecting
 * flags (FSW update, no result stored).
 *
 * @param   bRm         The ModR/M byte; the STn index is taken from the r/m
 *                      field.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    /* UINT8_MAX = no destination register to fill with a QNaN on underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW(u16Fsw);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
6837
6838
/**
 * Common worker for FPU instructions working on ST0 and STn, only affecting
 * flags, and popping the register stack when done.
 *
 * @param   bRm         The ModR/M byte; the STn index is taken from the r/m
 *                      field.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    /* UINT8_MAX = no destination register to fill with a QNaN on underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(UINT8_MAX);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
6869
6870
/** Opcode 0xd8 11/0.  FADD ST0,STn - add STn to ST0, result in ST0. */
FNIEMOP_DEF_1(iemOp_fadd_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_stN, "fadd st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fadd_r80_by_r80);
}
6877
6878
/** Opcode 0xd8 11/1.  FMUL ST0,STn - multiply ST0 by STn, result in ST0. */
FNIEMOP_DEF_1(iemOp_fmul_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_stN, "fmul st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fmul_r80_by_r80);
}
6885
6886
/** Opcode 0xd8 11/2.  FCOM ST0,STn - compare ST0 with STn, sets FSW only. */
FNIEMOP_DEF_1(iemOp_fcom_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_stN, "fcom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fcom_r80_by_r80);
}
6893
6894
/** Opcode 0xd8 11/3.  FCOMP ST0,STn - compare and pop; reuses the FCOM
 *  implementation, the pop is done by the _pop worker. */
FNIEMOP_DEF_1(iemOp_fcomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_stN, "fcomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fcom_r80_by_r80);
}
6901
6902
/** Opcode 0xd8 11/4.  FSUB ST0,STn - ST0 = ST0 - STn. */
FNIEMOP_DEF_1(iemOp_fsub_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_stN, "fsub st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsub_r80_by_r80);
}
6909
6910
/** Opcode 0xd8 11/5.  FSUBR ST0,STn - ST0 = STn - ST0 (reversed operands). */
FNIEMOP_DEF_1(iemOp_fsubr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_stN, "fsubr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsubr_r80_by_r80);
}
6917
6918
/** Opcode 0xd8 11/6.  FDIV ST0,STn - ST0 = ST0 / STn. */
FNIEMOP_DEF_1(iemOp_fdiv_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_stN, "fdiv st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdiv_r80_by_r80);
}
6925
6926
/** Opcode 0xd8 11/7.  FDIVR ST0,STn - ST0 = STn / ST0 (reversed operands). */
FNIEMOP_DEF_1(iemOp_fdivr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_stN, "fdivr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdivr_r80_by_r80);
}
6933
6934
/**
 * Common worker for FPU instructions working on ST0 and an m32r (32-bit
 * floating point memory operand), storing the result in ST0.
 *
 * @param   bRm         The ModR/M byte; used to compute the effective
 *                      address of the m32r operand.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32r, uint8_t, bRm, PFNIEMAIMPLFPUR32, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Fetch the memory operand before touching the FPU state. */
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr32Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
6970
6971
/** Opcode 0xd8 !11/0.  FADD ST0,m32r - add a 32-bit real from memory to ST0. */
FNIEMOP_DEF_1(iemOp_fadd_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_m32r, "fadd st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fadd_r80_by_r32);
}
6978
6979
/** Opcode 0xd8 !11/1.  FMUL ST0,m32r - multiply ST0 by a 32-bit real from memory. */
FNIEMOP_DEF_1(iemOp_fmul_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_m32r, "fmul st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fmul_r80_by_r32);
}
6986
6987
/** Opcode 0xd8 !11/2.  FCOM ST0,m32r - compare ST0 with a 32-bit real from
 *  memory; only updates FSW, no result is stored. */
FNIEMOP_DEF_1(iemOp_fcom_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m32r, "fcom st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        /* ST0 empty: stack underflow; UINT8_MAX = no dest reg to QNaN. */
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7020
7021
/** Opcode 0xd8 !11/3.  FCOMP ST0,m32r - like FCOM m32r but pops the FPU
 *  stack afterwards; only updates FSW, no result is stored. */
FNIEMOP_DEF_1(iemOp_fcomp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m32r, "fcomp st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        /* ST0 empty: stack underflow, still pop; UINT8_MAX = no dest reg. */
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7054
7055
/** Opcode 0xd8 !11/4.  FSUB ST0,m32r - ST0 = ST0 - m32r. */
FNIEMOP_DEF_1(iemOp_fsub_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_m32r, "fsub st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsub_r80_by_r32);
}
7062
7063
/** Opcode 0xd8 !11/5.  FSUBR ST0,m32r - ST0 = m32r - ST0 (reversed operands). */
FNIEMOP_DEF_1(iemOp_fsubr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_m32r, "fsubr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsubr_r80_by_r32);
}
7070
7071
/** Opcode 0xd8 !11/6.  FDIV ST0,m32r - ST0 = ST0 / m32r. */
FNIEMOP_DEF_1(iemOp_fdiv_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_m32r, "fdiv st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdiv_r80_by_r32);
}
7078
7079
/** Opcode 0xd8 !11/7.  FDIVR ST0,m32r - ST0 = m32r / ST0 (reversed operands). */
FNIEMOP_DEF_1(iemOp_fdivr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_m32r, "fdivr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdivr_r80_by_r32);
}
7086
7087
/**
 * @opcode 0xd8
 *
 * FPU escape group 0: dispatches on the ModR/M byte.  Mod == 11b selects
 * the register forms (ST0,STn), otherwise the m32r memory forms.
 */
FNIEMOP_DEF(iemOp_EscF0)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the last FPU opcode (11 bits: low 3 opcode bits + ModR/M). */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd8 & 0x7);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_stN, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_stN, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_stN, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m32r, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m32r, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m32r, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m32r, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m32r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m32r, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m32r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
7127
7128
/** Opcode 0xd9 /0 mem32real
 * FLD m32r - push a 32-bit real from memory onto the FPU stack (converted
 * to 80-bit).  Pushing requires ST7 to be free, otherwise stack overflow.
 * @sa iemOp_fld_m64r */
FNIEMOP_DEF_1(iemOp_fld_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m32r, "fld m32r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val, r32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* The push target is ST7 (relative), which must be empty. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r32_to_r80, pFpuRes, pr32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7161
7162
/** Opcode 0xd9 !11/2 mem32real
 * FST m32r - store ST0 to memory as a 32-bit real; the stack is not popped.
 * If ST0 is empty and the invalid-operation exception is masked (FCW.IM),
 * a negative QNaN is stored instead. */
FNIEMOP_DEF_1(iemOp_fst_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m32r, "fst m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        /* Only commit the store if the conversion didn't raise an unmasked exception. */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7197
7198
/** Opcode 0xd9 !11/3
 * FSTP m32r - store ST0 to memory as a 32-bit real, then pop the stack.
 * If ST0 is empty and FCW.IM is set, a negative QNaN is stored instead. */
FNIEMOP_DEF_1(iemOp_fstp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m32r, "fstp m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        /* Only commit the store if the conversion didn't raise an unmasked exception. */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7233
7234
/** Opcode 0xd9 !11/4
 * FLDENV m14/28byte - load the FPU environment (control/status/tag words,
 * instruction and data pointers) from memory; layout depends on the
 * effective operand size.  Deferred to a C implementation. */
FNIEMOP_DEF_1(iemOp_fldenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fldenv, "fldenv m14/28byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fldenv, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
7252
7253
7254/** Opcode 0xd9 !11/5 */
7255FNIEMOP_DEF_1(iemOp_fldcw, uint8_t, bRm)
7256{
7257 IEMOP_MNEMONIC(fldcw_m2byte, "fldcw m2byte");
7258 IEM_MC_BEGIN(1, 1);
7259 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7260 IEM_MC_ARG(uint16_t, u16Fsw, 0);
7261 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7262 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7263 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7264 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7265 IEM_MC_FETCH_MEM_U16(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
7266 IEM_MC_CALL_CIMPL_1(iemCImpl_fldcw, u16Fsw);
7267 IEM_MC_END();
7268 return VINF_SUCCESS;
7269}
7270
7271
/** Opcode 0xd9 !11/6
 * FNSTENV m14/28byte - store the FPU environment to memory without checking
 * for pending exceptions; layout depends on the effective operand size.
 * Deferred to a C implementation. */
FNIEMOP_DEF_1(iemOp_fnstenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstenv, "fstenv m14/m28byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fnstenv, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}
7289
7290
/** Opcode 0xd9 !11/7 - FNSTCW.
 * Stores the current FPU control word to a 16-bit memory operand; no FPU
 * exceptions are checked (no-wait form). */
FNIEMOP_DEF_1(iemOp_fnstcw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstcw_m2byte, "fnstcw m2byte");
    IEM_MC_BEGIN(2, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fcw);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FCW(u16Fcw);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Fcw);
    IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
    return VINF_SUCCESS;
}
7308
7309
/** Opcode 0xd9 0xd0, 0xd9 0xd8-0xdf, ++?. - FNOP.
 * Does nothing besides the usual FPU housekeeping: raises \#NM/pending \#MF
 * as appropriate and updates FOP/FPUIP. */
FNIEMOP_DEF(iemOp_fnop)
{
    IEMOP_MNEMONIC(fnop, "fnop");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /** @todo Testcase: looks like FNOP leaves FOP alone but updates FPUIP. Could be
     *        intel optimizations. Investigate. */
    IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
    return VINF_SUCCESS;
}
7327
7328
/** Opcode 0xd9 11/0 stN - FLD ST(i).
 * Pushes a copy of ST(i) onto the FPU register stack; signals stack
 * underflow (via the push-underflow path) if ST(i) is empty. */
FNIEMOP_DEF_1(iemOp_fld_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_stN, "fld stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF?  Intel mentioned it not. AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, bRm & X86_MODRM_RM_MASK)
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_PUSH_FPU_RESULT(FpuRes);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW();
    IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
7356
7357
/** Opcode 0xd9 11/3 stN - FXCH ST(i).
 * Exchanges ST(0) and ST(i).  When both registers hold values the swap is
 * done inline (setting C1 in the result FSW); if either is empty the
 * underflow handling is delegated to iemCImpl_fxch_underflow. */
FNIEMOP_DEF_1(iemOp_fxch_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fxch_stN, "fxch stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF?  Intel mentioned it not. AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(1, 3);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value2);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_CONST(uint8_t, iStReg, /*=*/ bRm & X86_MODRM_RM_MASK, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        IEM_MC_SET_FPU_RESULT(FpuRes, X86_FSW_C1, pr80Value2);      /* ST(i) value -> ST(0), C1 set */
        IEM_MC_STORE_FPUREG_R80_SRC_REF(bRm & X86_MODRM_RM_MASK, pr80Value1); /* old ST(0) -> ST(i) */
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_CALL_CIMPL_1(iemCImpl_fxch_underflow, iStReg);
    IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
7388
7389
/** Opcode 0xd9 11/4, 0xdd 11/2 - FSTP ST(i).
 * Copies ST(0) to ST(i) and pops the register stack.  The iDstReg == 0 case
 * (fstp st0,st0) is special-cased as a plain pop since it is commonly used
 * as an unofficial 'ffreep st0'. */
FNIEMOP_DEF_1(iemOp_fstp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_st0_stN, "fstp st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* fstp st0, st0 is frequently used as an official 'ffreep st0' sequence. */
    uint8_t const iDstReg = bRm & X86_MODRM_RM_MASK;
    if (!iDstReg)
    {
        /* Destination is ST(0) itself: just pop, no value needs copying. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL_CONST(uint16_t, u16Fsw, /*=*/ 0);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY(0)
            IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
        IEM_MC_ELSE()
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(0);
        IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* Copy ST(0) into ST(i), then pop. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
        IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
            IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
            IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, iDstReg);
        IEM_MC_ELSE()
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(iDstReg);
        IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
7436
7437
/**
 * Common worker for FPU instructions working on ST0 and replaces it with the
 * result, i.e. unary operators.
 *
 * Raises \#NM / pending \#MF first; if ST(0) is empty the stack-underflow
 * path is taken instead of calling the assembly worker.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpu_st0, PFNIEMAIMPLFPUR80UNARY, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(2, 1);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuRes, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7467
7468
/** Opcode 0xd9 0xe0 - FCHS: complement the sign of ST(0). */
FNIEMOP_DEF(iemOp_fchs)
{
    IEMOP_MNEMONIC(fchs_st0, "fchs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fchs_r80);
}
7475
7476
/** Opcode 0xd9 0xe1 - FABS: replace ST(0) with its absolute value. */
FNIEMOP_DEF(iemOp_fabs)
{
    IEMOP_MNEMONIC(fabs_st0, "fabs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fabs_r80);
}
7483
7484
/**
 * Common worker for FPU instructions working on ST0 and only returns FSW.
 *
 * ST(0) is examined but not modified; only the status word is updated with
 * whatever the assembly worker produces.  An empty ST(0) takes the
 * stack-underflow path (no destination register, hence UINT8_MAX).
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0, PFNIEMAIMPLFPUR80UNARYFSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(2, 1);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pu16Fsw, pr80Value);
        IEM_MC_UPDATE_FSW(u16Fsw);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7513
7514
/** Opcode 0xd9 0xe4 - FTST: compare ST(0) with 0.0, setting condition codes only. */
FNIEMOP_DEF(iemOp_ftst)
{
    IEMOP_MNEMONIC(ftst_st0, "ftst st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_ftst_r80);
}
7521
7522
/** Opcode 0xd9 0xe5 - FXAM: classify the value in ST(0) via condition codes. */
FNIEMOP_DEF(iemOp_fxam)
{
    IEMOP_MNEMONIC(fxam_st0, "fxam st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_fxam_r80);
}
7529
7530
/**
 * Common worker for FPU instructions pushing a constant onto the FPU stack.
 *
 * The push target is the register that will become the new top, i.e. current
 * relative register 7; if it is occupied the push-overflow path is taken
 * instead of calling the assembly worker.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuPushConstant, PFNIEMAIMPLFPUR80LDCONST, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(1, 1);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_1(pfnAImpl, pFpuRes);
        IEM_MC_PUSH_FPU_RESULT(FpuRes);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW();
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7558
7559
/** Opcode 0xd9 0xe8 - FLD1: push +1.0. */
FNIEMOP_DEF(iemOp_fld1)
{
    IEMOP_MNEMONIC(fld1, "fld1");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fld1);
}
7566
7567
/** Opcode 0xd9 0xe9 - FLDL2T: push log2(10). */
FNIEMOP_DEF(iemOp_fldl2t)
{
    IEMOP_MNEMONIC(fldl2t, "fldl2t");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2t);
}
7574
7575
/** Opcode 0xd9 0xea - FLDL2E: push log2(e). */
FNIEMOP_DEF(iemOp_fldl2e)
{
    IEMOP_MNEMONIC(fldl2e, "fldl2e");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2e);
}
7582
/** Opcode 0xd9 0xeb - FLDPI: push pi. */
FNIEMOP_DEF(iemOp_fldpi)
{
    IEMOP_MNEMONIC(fldpi, "fldpi");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldpi);
}
7589
7590
/** Opcode 0xd9 0xec - FLDLG2: push log10(2). */
FNIEMOP_DEF(iemOp_fldlg2)
{
    IEMOP_MNEMONIC(fldlg2, "fldlg2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldlg2);
}
7597
/** Opcode 0xd9 0xed - FLDLN2: push ln(2). */
FNIEMOP_DEF(iemOp_fldln2)
{
    IEMOP_MNEMONIC(fldln2, "fldln2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldln2);
}
7604
7605
/** Opcode 0xd9 0xee - FLDZ: push +0.0. */
FNIEMOP_DEF(iemOp_fldz)
{
    IEMOP_MNEMONIC(fldz, "fldz");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldz);
}
7612
7613
/** Opcode 0xd9 0xf0 - F2XM1: replace ST(0) with 2^ST(0) - 1. */
FNIEMOP_DEF(iemOp_f2xm1)
{
    IEMOP_MNEMONIC(f2xm1_st0, "f2xm1 st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_f2xm1_r80);
}
7620
7621
/**
 * Common worker for FPU instructions working on STn and ST0, storing the
 * result in STn, and popping the stack unless IE, DE or ZE was raised.
 *
 * Operand order: STn (from the ModR/M rm field) is value1, ST0 is value2.
 * If either register is empty the stack-underflow-then-pop path is taken.
 *
 * @param   bRm         The ModR/M byte (rm selects STn).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, bRm & X86_MODRM_RM_MASK);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(bRm & X86_MODRM_RM_MASK);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7653
7654
/** Opcode 0xd9 0xf1 - FYL2X: ST(1) := ST(1) * log2(ST(0)), then pop. */
FNIEMOP_DEF(iemOp_fyl2x)
{
    IEMOP_MNEMONIC(fyl2x_st0, "fyl2x st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2x_r80_by_r80);
}
7661
7662
/**
 * Common worker for FPU instructions working on ST0 and having two outputs,
 * one replacing ST0 and one pushed onto the stack.
 *
 * If ST(0) is empty, the push-underflow-two path is taken instead of the
 * assembly worker.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuReplace_st0_push, PFNIEMAIMPLFPUR80UNARYTWO, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(2, 1);
    IEM_MC_LOCAL(IEMFPURESULTTWO, FpuResTwo);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULTTWO, pFpuResTwo, FpuResTwo, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuResTwo, pr80Value);
        IEM_MC_PUSH_FPU_RESULT_TWO(FpuResTwo);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW_TWO();
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7692
7693
/** Opcode 0xd9 0xf2 - FPTAN: partial tangent of ST(0); result replaces ST(0) and 1.0 is pushed. */
FNIEMOP_DEF(iemOp_fptan)
{
    IEMOP_MNEMONIC(fptan_st0, "fptan st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fptan_r80_r80);
}
7700
7701
/** Opcode 0xd9 0xf3 - FPATAN: ST(1) := arctan(ST(1)/ST(0)), then pop. */
FNIEMOP_DEF(iemOp_fpatan)
{
    IEMOP_MNEMONIC(fpatan_st1_st0, "fpatan st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fpatan_r80_by_r80);
}
7708
7709
/** Opcode 0xd9 0xf4 - FXTRACT: split ST(0) into exponent (replaces ST(0)) and significand (pushed). */
FNIEMOP_DEF(iemOp_fxtract)
{
    IEMOP_MNEMONIC(fxtract_st0, "fxtract st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fxtract_r80_r80);
}
7716
7717
/** Opcode 0xd9 0xf5 - FPREM1: IEEE partial remainder of ST(0)/ST(1), result in ST(0). */
FNIEMOP_DEF(iemOp_fprem1)
{
    IEMOP_MNEMONIC(fprem1_st0_st1, "fprem1 st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem1_r80_by_r80);
}
7724
7725
/** Opcode 0xd9 0xf6 - FDECSTP: decrement the FPU stack top pointer (no register content change). */
FNIEMOP_DEF(iemOp_fdecstp)
{
    IEMOP_MNEMONIC(fdecstp, "fdecstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_DEC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0);     /* clears C0/C2/C3 (documented as undefined) */

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
7748
7749
/** Opcode 0xd9 0xf7 - FINCSTP: increment the FPU stack top pointer (no register content change). */
FNIEMOP_DEF(iemOp_fincstp)
{
    IEMOP_MNEMONIC(fincstp, "fincstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0);     /* clears C0/C2/C3 (documented as undefined) */

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
7772
7773
/** Opcode 0xd9 0xf8 - FPREM: (truncating) partial remainder of ST(0)/ST(1), result in ST(0). */
FNIEMOP_DEF(iemOp_fprem)
{
    IEMOP_MNEMONIC(fprem_st0_st1, "fprem st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem_r80_by_r80);
}
7780
7781
/** Opcode 0xd9 0xf9 - FYL2XP1: ST(1) := ST(1) * log2(ST(0) + 1), then pop. */
FNIEMOP_DEF(iemOp_fyl2xp1)
{
    IEMOP_MNEMONIC(fyl2xp1_st1_st0, "fyl2xp1 st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2xp1_r80_by_r80);
}
7788
7789
/** Opcode 0xd9 0xfa - FSQRT: replace ST(0) with its square root. */
FNIEMOP_DEF(iemOp_fsqrt)
{
    IEMOP_MNEMONIC(fsqrt_st0, "fsqrt st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsqrt_r80);
}
7796
7797
/** Opcode 0xd9 0xfb - FSINCOS: sine replaces ST(0), cosine is pushed. */
FNIEMOP_DEF(iemOp_fsincos)
{
    IEMOP_MNEMONIC(fsincos_st0, "fsincos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fsincos_r80_r80);
}
7804
7805
/** Opcode 0xd9 0xfc - FRNDINT: round ST(0) to an integer (per FCW rounding mode). */
FNIEMOP_DEF(iemOp_frndint)
{
    IEMOP_MNEMONIC(frndint_st0, "frndint st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_frndint_r80);
}
7812
7813
/** Opcode 0xd9 0xfd - FSCALE: ST(0) := ST(0) * 2^trunc(ST(1)). */
FNIEMOP_DEF(iemOp_fscale)
{
    IEMOP_MNEMONIC(fscale_st0_st1, "fscale st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fscale_r80_by_r80);
}
7820
7821
/** Opcode 0xd9 0xfe - FSIN: replace ST(0) with its sine. */
FNIEMOP_DEF(iemOp_fsin)
{
    IEMOP_MNEMONIC(fsin_st0, "fsin st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsin_r80);
}
7828
7829
/** Opcode 0xd9 0xff - FCOS: replace ST(0) with its cosine. */
FNIEMOP_DEF(iemOp_fcos)
{
    IEMOP_MNEMONIC(fcos_st0, "fcos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fcos_r80);
}
7836
7837
/** Used by iemOp_EscF1.
 * Dispatch table for 0xd9 with mod=11 and reg in the 4..7 range, indexed by
 * opcode byte minus 0xe0 (32 entries, 0xe0 thru 0xff). */
IEM_STATIC const PFNIEMOP g_apfnEscF1_E0toFF[32] =
{
    /* 0xe0 */  iemOp_fchs,
    /* 0xe1 */  iemOp_fabs,
    /* 0xe2 */  iemOp_Invalid,
    /* 0xe3 */  iemOp_Invalid,
    /* 0xe4 */  iemOp_ftst,
    /* 0xe5 */  iemOp_fxam,
    /* 0xe6 */  iemOp_Invalid,
    /* 0xe7 */  iemOp_Invalid,
    /* 0xe8 */  iemOp_fld1,
    /* 0xe9 */  iemOp_fldl2t,
    /* 0xea */  iemOp_fldl2e,
    /* 0xeb */  iemOp_fldpi,
    /* 0xec */  iemOp_fldlg2,
    /* 0xed */  iemOp_fldln2,
    /* 0xee */  iemOp_fldz,
    /* 0xef */  iemOp_Invalid,
    /* 0xf0 */  iemOp_f2xm1,
    /* 0xf1 */  iemOp_fyl2x,
    /* 0xf2 */  iemOp_fptan,
    /* 0xf3 */  iemOp_fpatan,
    /* 0xf4 */  iemOp_fxtract,
    /* 0xf5 */  iemOp_fprem1,
    /* 0xf6 */  iemOp_fdecstp,
    /* 0xf7 */  iemOp_fincstp,
    /* 0xf8 */  iemOp_fprem,
    /* 0xf9 */  iemOp_fyl2xp1,
    /* 0xfa */  iemOp_fsqrt,
    /* 0xfb */  iemOp_fsincos,
    /* 0xfc */  iemOp_frndint,
    /* 0xfd */  iemOp_fscale,
    /* 0xfe */  iemOp_fsin,
    /* 0xff */  iemOp_fcos
};
7874
7875
/**
 * @opcode 0xd9
 *
 * Escape-opcode decoder for 0xd9: reads the ModR/M byte, records the FPU
 * opcode word, then dispatches on mod and reg.  Register forms (mod=11) with
 * reg >= 4 are dispatched through g_apfnEscF1_E0toFF; memory forms go to the
 * m32r/environment/control-word handlers.
 */
FNIEMOP_DEF(iemOp_EscF1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd9 & 0x7);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm);
            case 2:
                if (bRm == 0xd0)
                    return FNIEMOP_CALL(iemOp_fnop);
                return IEMOP_RAISE_INVALID_OPCODE();
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved. Intel behavior seems to be FSTP ST(i) though. */
            case 4:
            case 5:
            case 6:
            case 7:
                Assert((unsigned)bRm - 0xe0U < RT_ELEMENTS(g_apfnEscF1_E0toFF));
                return FNIEMOP_CALL(g_apfnEscF1_E0toFF[bRm - 0xe0]);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m32r,  bRm);
            case 1: return IEMOP_RAISE_INVALID_OPCODE();
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m32r,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fldenv,    bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fldcw,     bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fnstenv,   bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstcw,    bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
7920
7921
/** Opcode 0xda 11/0 - FCMOVB: copy ST(i) to ST(0) if CF is set. */
FNIEMOP_DEF_1(iemOp_fcmovb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovb_st0_stN, "fcmovb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF)       /* condition: below */
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7948
7949
/** Opcode 0xda 11/1 - FCMOVE: copy ST(i) to ST(0) if ZF is set. */
FNIEMOP_DEF_1(iemOp_fcmove_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmove_st0_stN, "fcmove st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF)       /* condition: equal */
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7976
7977
/** Opcode 0xda 11/2 - FCMOVBE: copy ST(i) to ST(0) if CF or ZF is set. */
FNIEMOP_DEF_1(iemOp_fcmovbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovbe_st0_stN, "fcmovbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) /* condition: below or equal */
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8004
8005
/** Opcode 0xda 11/3 - FCMOVU: copy ST(i) to ST(0) if PF is set (unordered). */
FNIEMOP_DEF_1(iemOp_fcmovu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovu_st0_stN, "fcmovu st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF)       /* condition: unordered */
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8032
8033
/**
 * Common worker for FPU instructions working on ST0 and STn, only affecting
 * flags, and popping twice when done.
 *
 * Note: the second operand is hardcoded to ST(1); this worker serves the
 * compare-and-pop-twice forms (e.g. FUCOMPP).
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, 1)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW_THEN_POP_POP(u16Fsw);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP_POP();
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8065
8066
/** Opcode 0xda 0xe9 - FUCOMPP: unordered compare ST(0) with ST(1), then pop twice. */
FNIEMOP_DEF(iemOp_fucompp)
{
    IEMOP_MNEMONIC(fucompp_st0_stN, "fucompp st0,stN");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fucom_r80_by_r80);
}
8073
8074
/**
 * Common worker for FPU instructions working on ST0 and an m32i, and storing
 * the result in ST0.
 *
 * The 32-bit signed integer is fetched from memory before the empty-register
 * test; if ST(0) is empty the stack-underflow path is taken.
 *
 * @param   bRm         The ModR/M byte (memory form).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32i, uint8_t, bRm, PFNIEMAIMPLFPUI32, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi32Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8110
8111
/** Opcode 0xda !11/0 - FIADD m32i: ST(0) += (int32 from memory). */
FNIEMOP_DEF_1(iemOp_fiadd_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m32i, "fiadd m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fiadd_r80_by_i32);
}
8118
8119
/** Opcode 0xda !11/1 - FIMUL m32i: ST(0) *= (int32 from memory). */
FNIEMOP_DEF_1(iemOp_fimul_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m32i, "fimul m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fimul_r80_by_i32);
}
8126
8127
/** Opcode 0xda !11/2 - FICOM m32i.
 * Compares ST(0) with a 32-bit signed integer from memory; only the status
 * word is updated (memory operand info is recorded for FPUDP/FPUDS). */
FNIEMOP_DEF_1(iemOp_ficom_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m32i, "ficom st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8160
8161
/** Opcode 0xda !11/3 - FICOMP m32i.
 * Like FICOM m32i but pops the register stack afterwards. */
FNIEMOP_DEF_1(iemOp_ficomp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m32i, "ficomp st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8194
8195
/** Opcode 0xda !11/4 - FISUB m32i: ST(0) -= (int32 from memory). */
FNIEMOP_DEF_1(iemOp_fisub_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m32i, "fisub m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisub_r80_by_i32);
}
8202
8203
/** Opcode 0xda !11/5 - FISUBR m32i: ST(0) := (int32 from memory) - ST(0). */
FNIEMOP_DEF_1(iemOp_fisubr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m32i, "fisubr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisubr_r80_by_i32);
}
8210
8211
/** Opcode 0xda !11/6 - FIDIV m32i: ST(0) /= (int32 from memory). */
FNIEMOP_DEF_1(iemOp_fidiv_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m32i, "fidiv m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidiv_r80_by_i32);
}
8218
8219
/** Opcode 0xda !11/7 - FIDIVR m32i: ST(0) := (int32 from memory) / ST(0). */
FNIEMOP_DEF_1(iemOp_fidivr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m32i, "fidivr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidivr_r80_by_i32);
}
8226
8227
/**
 * @opcode 0xda
 *
 * Escape-opcode decoder for 0xda: reads the ModR/M byte, records the FPU
 * opcode word, then dispatches.  Register forms are the FCMOVcc family plus
 * FUCOMPP (only 0xe9); memory forms are the 32-bit-integer arithmetic group.
 */
FNIEMOP_DEF(iemOp_EscF2)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xda & 0x7);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovb_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmove_stN,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovu_stN,  bRm);
            case 4: return IEMOP_RAISE_INVALID_OPCODE();
            case 5:
                if (bRm == 0xe9)
                    return FNIEMOP_CALL(iemOp_fucompp);
                return IEMOP_RAISE_INVALID_OPCODE();
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m32i,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m32i,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m32i,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m32i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m32i,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m32i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m32i,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m32i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8269
8270
/** Opcode 0xdb !11/0 - FILD m32i.
 * Converts a 32-bit signed integer from memory to 80-bit real and pushes it;
 * signals push overflow if the target register (relative 7) is occupied. */
FNIEMOP_DEF_1(iemOp_fild_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m32i, "fild m32i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int32_t, i32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val, i32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i32_to_r80, pFpuRes, pi32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8302
8303
/** Opcode 0xdb !11/1 - FISTTP m32i.
 * Stores ST(0) to memory as int32 with truncation and pops.  On an empty
 * ST(0), the integer-indefinite value is stored if IM is masked in FCW,
 * then the underflow-then-pop path runs. */
FNIEMOP_DEF_1(iemOp_fisttp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m32i, "fisttp m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8338
8339
/** Opcode 0xdb !11/2.
 * FIST m32i - store ST(0) in m32i (rounded per FCW.RC), no pop. */
FNIEMOP_DEF_1(iemOp_fist_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m32i, "fist m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Write-map the destination first so memory faults precede FPU work. */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        /* Unlike FISTP this variant leaves the register stack untouched. */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST(0): masked IM stores the integer indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8374
8375
/** Opcode 0xdb !11/3.
 * FISTP m32i - store ST(0) in m32i (rounded per FCW.RC), then pop. */
FNIEMOP_DEF_1(iemOp_fistp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m32i, "fistp m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Write-map the destination first so memory faults precede FPU work. */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST(0): masked IM stores the integer indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8410
8411
/** Opcode 0xdb !11/5.
 * FLD m80r - push an 80-bit real from memory onto the register stack. */
FNIEMOP_DEF_1(iemOp_fld_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m80r, "fld m80r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT80U, r80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT80U, pr80Val, r80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Fetch the source value before touching FPU state so memory faults come first. */
    IEM_MC_FETCH_MEM_R80(r80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* The push goes into ST(7) (i.e. what becomes the new ST(0)); it must be free. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r80, pFpuRes, pr80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8443
8444
/** Opcode 0xdb !11/7.
 * FSTP m80r - store ST(0) to an 80-bit real in memory, then pop. */
FNIEMOP_DEF_1(iemOp_fstp_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m80r, "fstp m80r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT80U, pr80Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Write-map the destination first so memory faults precede FPU work. */
    IEM_MC_MEM_MAP(pr80Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r80, pu16Fsw, pr80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr80Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST(0): masked IM stores the real indefinite (negative QNaN). */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(pr80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr80Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8479
8480
/** Opcode 0xdb 11/0.
 * FCMOVNB - copy ST(i) to ST(0) when CF is clear (not below). */
FNIEMOP_DEF_1(iemOp_fcmovnb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnb_st0_stN, "fcmovnb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(0) and ST(i) must be valid; otherwise it's a stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        /* FOP/FIP are updated whether or not the move is taken. */
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8507
8508
/** Opcode 0xdb 11/1.
 * FCMOVNE - copy ST(i) to ST(0) when ZF is clear (not equal). */
FNIEMOP_DEF_1(iemOp_fcmovne_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovne_st0_stN, "fcmovne st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(0) and ST(i) must be valid; otherwise it's a stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        /* FOP/FIP are updated whether or not the move is taken. */
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8535
8536
/** Opcode 0xdb 11/2.
 * FCMOVNBE - copy ST(i) to ST(0) when both CF and ZF are clear (not below or equal). */
FNIEMOP_DEF_1(iemOp_fcmovnbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnbe_st0_stN, "fcmovnbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(0) and ST(i) must be valid; otherwise it's a stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        /* FOP/FIP are updated whether or not the move is taken. */
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8563
8564
/** Opcode 0xdb 11/3.
 * FCMOVNU - copy ST(i) to ST(0) when PF is clear (not unordered). */
FNIEMOP_DEF_1(iemOp_fcmovnnu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnnu_st0_stN, "fcmovnnu st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(0) and ST(i) must be valid; otherwise it's a stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        /* FOP/FIP are updated whether or not the move is taken. */
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8591
8592
/** Opcode 0xdb 0xe0.
 * FNENI - 8087-only (enable interrupts); emulated as a no-op apart from the
 * usual \#NM check, matching the "ign" behaviour of later CPUs. */
FNIEMOP_DEF(iemOp_fneni)
{
    IEMOP_MNEMONIC(fneni, "fneni (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8604
8605
/** Opcode 0xdb 0xe1.
 * FNDISI - 8087-only (disable interrupts); emulated as a no-op apart from the
 * usual \#NM check, matching the "ign" behaviour of later CPUs. */
FNIEMOP_DEF(iemOp_fndisi)
{
    IEMOP_MNEMONIC(fndisi, "fndisi (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8617
8618
/** Opcode 0xdb 0xe2.
 * FNCLEX - clear the FPU exception flags in FSW without checking for pending
 * unmasked exceptions first (the no-wait form). */
FNIEMOP_DEF(iemOp_fnclex)
{
    IEMOP_MNEMONIC(fnclex, "fnclex");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_CLEAR_FSW_EX();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8633
8634
/** Opcode 0xdb 0xe3.
 * FNINIT - reinitialize the FPU; deferred to the C implementation with
 * fCheckXcpts=false since the no-wait form skips the pending-exception check. */
FNIEMOP_DEF(iemOp_fninit)
{
    IEMOP_MNEMONIC(fninit, "fninit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_finit, false /*fCheckXcpts*/);
}
8642
8643
/** Opcode 0xdb 0xe4.
 * FNSETPM - 80287-only; emulated as a no-op apart from the usual \#NM check. */
FNIEMOP_DEF(iemOp_fnsetpm)
{
    IEMOP_MNEMONIC(fnsetpm, "fnsetpm (80287/ign)"); /* set protected mode on fpu. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8655
8656
/** Opcode 0xdb 0xe5.
 * FRSTPM - 80287XL-only; we follow newer CPUs and raise \#UD (the NOP
 * emulation is kept disabled below for reference). */
FNIEMOP_DEF(iemOp_frstpm)
{
    IEMOP_MNEMONIC(frstpm, "frstpm (80287XL/ign)"); /* reset pm, back to real mode. */
#if 0 /* #UDs on newer CPUs */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
#else
    return IEMOP_RAISE_INVALID_OPCODE();
#endif
}
8672
8673
/** Opcode 0xdb 11/5.
 * FUCOMI - unordered compare of ST(0) with ST(i), setting EFLAGS; no pop. */
FNIEMOP_DEF_1(iemOp_fucomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomi_st0_stN, "fucomi st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fucomi_r80_by_r80, false /*fPop*/);
}
8680
8681
/** Opcode 0xdb 11/6.
 * FCOMI - ordered compare of ST(0) with ST(i), setting EFLAGS; no pop. */
FNIEMOP_DEF_1(iemOp_fcomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomi_st0_stN, "fcomi st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, false /*fPop*/);
}
8688
8689
/**
 * @opcode 0xdb
 *
 * Escape opcode 0xdb decoder: dispatches on the ModR/M byte.  Register forms
 * (mod == 3) are FCMOVcc, the FNENI..FRSTPM control group, FUCOMI and FCOMI;
 * memory forms are 32-bit integer loads/stores and 80-bit real load/store.
 */
FNIEMOP_DEF(iemOp_EscF3)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FPU opcode (low 3 bits of 0xdb + ModR/M) for FOP reporting. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdb & 0x7);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovnb_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmovne_stN,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovnbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovnnu_stN, bRm);
            case 4:
                /* reg=4 encodes individual control instructions by the full
                   ModR/M byte (0xe0..0xe7). */
                switch (bRm)
                {
                    case 0xe0:  return FNIEMOP_CALL(iemOp_fneni);
                    case 0xe1:  return FNIEMOP_CALL(iemOp_fndisi);
                    case 0xe2:  return FNIEMOP_CALL(iemOp_fnclex);
                    case 0xe3:  return FNIEMOP_CALL(iemOp_fninit);
                    case 0xe4:  return FNIEMOP_CALL(iemOp_fnsetpm);
                    case 0xe5:  return FNIEMOP_CALL(iemOp_frstpm);
                    case 0xe6:  return IEMOP_RAISE_INVALID_OPCODE();
                    case 0xe7:  return IEMOP_RAISE_INVALID_OPCODE();
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case 5: return FNIEMOP_CALL_1(iemOp_fucomi_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fcomi_stN, bRm);
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fild_m32i,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m32i,bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fist_m32i,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fistp_m32i, bRm);
            case 4: return IEMOP_RAISE_INVALID_OPCODE();
            case 5: return FNIEMOP_CALL_1(iemOp_fld_m80r,   bRm);
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return FNIEMOP_CALL_1(iemOp_fstp_m80r,  bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8741
8742
/**
 * Common worker for FPU instructions working on STn and ST0, and storing the
 * result in STn unless IE, DE or ZE was raised.
 *
 * @param   bRm         The ModR/M byte; the rm field selects ST(i).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Note the operand order: value1 = ST(i), value2 = ST(0). */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8774
8775
/** Opcode 0xdc 11/0.
 * FADD ST(i),ST(0) - add ST(0) to ST(i), result stored in ST(i). */
FNIEMOP_DEF_1(iemOp_fadd_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_stN_st0, "fadd stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fadd_r80_by_r80);
}
8782
8783
/** Opcode 0xdc 11/1.
 * FMUL ST(i),ST(0) - multiply ST(i) by ST(0), result stored in ST(i). */
FNIEMOP_DEF_1(iemOp_fmul_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_stN_st0, "fmul stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fmul_r80_by_r80);
}
8790
8791
/** Opcode 0xdc 11/4.
 * FSUBR ST(i),ST(0) - reversed subtract, result stored in ST(i). */
FNIEMOP_DEF_1(iemOp_fsubr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_stN_st0, "fsubr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsubr_r80_by_r80);
}
8798
8799
/** Opcode 0xdc 11/5.
 * FSUB ST(i),ST(0) - subtract, result stored in ST(i). */
FNIEMOP_DEF_1(iemOp_fsub_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_stN_st0, "fsub stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsub_r80_by_r80);
}
8806
8807
/** Opcode 0xdc 11/6.
 * FDIVR ST(i),ST(0) - reversed divide, result stored in ST(i). */
FNIEMOP_DEF_1(iemOp_fdivr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_stN_st0, "fdivr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdivr_r80_by_r80);
}
8814
8815
/** Opcode 0xdc 11/7.
 * FDIV ST(i),ST(0) - divide, result stored in ST(i). */
FNIEMOP_DEF_1(iemOp_fdiv_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_stN_st0, "fdiv stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdiv_r80_by_r80);
}
8822
8823
/**
 * Common worker for FPU instructions working on ST0 and a 64-bit floating point
 * memory operand, and storing the result in ST0.
 *
 * @param   bRm         The ModR/M byte; the mem form selects the source operand.
 * @param   pfnImpl     Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_ST0_m64r, uint8_t, bRm, PFNIEMAIMPLFPUR64, pfnImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U, r64Factor2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Factor1, 1);
    IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Factor2, r64Factor2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Fetch the memory operand before FPU usage so memory faults come first. */
    IEM_MC_FETCH_MEM_R64(r64Factor2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Factor1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnImpl, pFpuRes, pr80Factor1, pr64Factor2);
        IEM_MC_STORE_FPU_RESULT_MEM_OP(FpuRes, 0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8858
8859
/** Opcode 0xdc !11/0.
 * FADD m64r - add a 64-bit real from memory to ST(0). */
FNIEMOP_DEF_1(iemOp_fadd_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_m64r, "fadd m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fadd_r80_by_r64);
}
8866
8867
/** Opcode 0xdc !11/1.
 * FMUL m64r - multiply ST(0) by a 64-bit real from memory. */
FNIEMOP_DEF_1(iemOp_fmul_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_m64r, "fmul m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fmul_r80_by_r64);
}
8874
8875
/** Opcode 0xdc !11/2.
 * FCOM m64r - compare ST(0) with a 64-bit real from memory; sets FSW only. */
FNIEMOP_DEF_1(iemOp_fcom_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m64r, "fcom st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Fetch the memory operand before FPU usage so memory faults come first. */
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8908
8909
/** Opcode 0xdc !11/3.
 * FCOMP m64r - compare ST(0) with a 64-bit real from memory, then pop. */
FNIEMOP_DEF_1(iemOp_fcomp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m64r, "fcomp st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Fetch the memory operand before FPU usage so memory faults come first. */
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8942
8943
/** Opcode 0xdc !11/4.
 * FSUB m64r - subtract a 64-bit real from memory from ST(0). */
FNIEMOP_DEF_1(iemOp_fsub_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_m64r, "fsub m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsub_r80_by_r64);
}
8950
8951
/** Opcode 0xdc !11/5.
 * FSUBR m64r - reversed subtract: memory operand minus ST(0). */
FNIEMOP_DEF_1(iemOp_fsubr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_m64r, "fsubr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsubr_r80_by_r64);
}
8958
8959
/** Opcode 0xdc !11/6.
 * FDIV m64r - divide ST(0) by a 64-bit real from memory. */
FNIEMOP_DEF_1(iemOp_fdiv_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_m64r, "fdiv m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdiv_r80_by_r64);
}
8966
8967
/** Opcode 0xdc !11/7.
 * FDIVR m64r - reversed divide: memory operand divided by ST(0). */
FNIEMOP_DEF_1(iemOp_fdivr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_m64r, "fdivr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdivr_r80_by_r64);
}
8974
8975
/**
 * @opcode 0xdc
 *
 * Escape opcode 0xdc decoder: register forms are arithmetic with the result in
 * ST(i); memory forms take a 64-bit real operand with the result in ST(0).
 */
FNIEMOP_DEF(iemOp_EscF4)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FPU opcode (low 3 bits of 0xdc + ModR/M) for FOP reporting. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdc & 0x7);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN_st0,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN_st0,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN,      bRm); /* Marked reserved, intel behavior is that of FCOM ST(i). */
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN,     bRm); /* Marked reserved, intel behavior is that of FCOMP ST(i). */
            case 4: return FNIEMOP_CALL_1(iemOp_fsubr_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsub_stN_st0,  bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivr_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdiv_stN_st0,  bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m64r,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m64r,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m64r,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m64r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m64r,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m64r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m64r,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m64r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9014
9015
/** Opcode 0xdd !11/0.
 * FLD m64r - push a 64-bit real from memory onto the register stack.
 * @sa iemOp_fld_m32r */
FNIEMOP_DEF_1(iemOp_fld_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m64r, "fld m64r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val, r64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Fetch the source value before touching FPU state so memory faults come first. */
    IEM_MC_FETCH_MEM_R64(r64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    /* The push goes into ST(7) (i.e. what becomes the new ST(0)); it must be free. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r64_to_r80, pFpuRes, pr64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9047
9048
/** Opcode 0xdd !11/1.
 * FISTTP m64i - store ST(0) in m64i using truncation, then pop. */
FNIEMOP_DEF_1(iemOp_fisttp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m64i, "fisttp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Write-map the destination first so memory faults precede FPU work. */
    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST(0): masked IM stores the integer indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9083
9084
/** Opcode 0xdd !11/2.
 * FST m64r - store ST(0) to a 64-bit real in memory, no pop. */
FNIEMOP_DEF_1(iemOp_fst_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m64r, "fst m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Write-map the destination first so memory faults precede FPU work. */
    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST(0): masked IM stores the real indefinite (negative QNaN). */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9119
9120
9121
9122
/** Opcode 0xdd !11/3.
 * FSTP m64r - store ST(0) to a 64-bit real in memory, then pop. */
FNIEMOP_DEF_1(iemOp_fstp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m64r, "fstp m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Write-map the destination first so memory faults precede FPU work. */
    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST(0): masked IM stores the real indefinite (negative QNaN). */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9157
9158
/** Opcode 0xdd !11/4.
 * FRSTOR - restore the full FPU state from a 94/108-byte memory image;
 * deferred to the C implementation. */
FNIEMOP_DEF_1(iemOp_frstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC(frstor, "frstor m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_frstor, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
9176
9177
/** Opcode 0xdd !11/6.
 * FNSAVE - save the full FPU state to a 94/108-byte memory image (no-wait
 * form); deferred to the C implementation. */
FNIEMOP_DEF_1(iemOp_fnsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnsave, "fnsave m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fnsave, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;

}
9196
/** Opcode 0xdd !11/7.
 * FNSTSW m16 - store the FPU status word to memory (no-wait form). */
FNIEMOP_DEF_1(iemOp_fnstsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstsw_m16, "fnstsw m16");

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
    IEM_MC_ADVANCE_RIP();

/** @todo Debug / drop a hint to the verifier that things may differ
 *        from REM. Seen 0x4020 (iem) vs 0x4000 (rem) at 0008:801c6b88 booting
 *        NT4SP1. (X86_FSW_PE) */
    IEM_MC_END();
    return VINF_SUCCESS;
}
9221
9222
/** Opcode 0xdd 11/0.
 * FFREE ST(i) - tag the register as empty without changing its contents. */
FNIEMOP_DEF_1(iemOp_ffree_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffree_stN, "ffree stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C1, C2 and C3 are documented as undefined, we leave them
             unmodified. */

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9244
9245
/** Opcode 0xdd 11/2.
 * FST ST(i) - copy ST(0) to ST(i), no pop. */
FNIEMOP_DEF_1(iemOp_fst_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_st0_stN, "fst st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        /* Wrap the raw ST(0) value in a result (zero FSW) so the generic
           result-store path can be used. */
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
    IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9270
9271
/** Opcode 0xdd 11/4.
 * FUCOM ST(i) - unordered compare of ST(0) with ST(i), setting FSW; no pop. */
FNIEMOP_DEF_1(iemOp_fucom_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucom_st0_stN, "fucom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fucom_r80_by_r80);
}
9278
9279
/** Opcode 0xdd 11/5. (EscF5 dispatches this on reg=5; old tag said 11/4.)
 * FUCOMP ST0,ST(i) - unordered compare, then pop ST(0). */
FNIEMOP_DEF_1(iemOp_fucomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomp_st0_stN, "fucomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fucom_r80_by_r80);
}
9286
9287
/**
 * @opcode 0xdd
 *
 * FPU escape 0xdd decoder: register forms (mod=3) vs memory forms,
 * dispatched on the ModR/M reg field.
 */
FNIEMOP_DEF(iemOp_EscF5)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the 11-bit FPU opcode (FOP) so FNSAVE/FNSTENV can report it. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdd & 0x7);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffree_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, intel behavior is that of XCHG ST(i). */
            case 2: return FNIEMOP_CALL_1(iemOp_fst_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fucom_stN_st0,bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fucomp_stN, bRm);
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m64r, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m64i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m64r, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m64r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_frstor, bRm);
            case 5: return IEMOP_RAISE_INVALID_OPCODE();
            case 6: return FNIEMOP_CALL_1(iemOp_fnsave, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstsw, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9326
9327
/** Opcode 0xde 11/0.
 * FADDP ST(i),ST0 - add and pop. */
FNIEMOP_DEF_1(iemOp_faddp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(faddp_stN_st0, "faddp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fadd_r80_by_r80);
}
9334
9335
/** Opcode 0xde 11/1. (EscF6 dispatches this on reg=1; old tag said 11/0.)
 * FMULP ST(i),ST0 - multiply and pop. */
FNIEMOP_DEF_1(iemOp_fmulp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmulp_stN_st0, "fmulp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fmul_r80_by_r80);
}
9342
9343
/** Opcode 0xde 0xd9.
 * FCOMPP - compare ST0 with ST1, then pop twice. */
FNIEMOP_DEF(iemOp_fcompp)
{
    IEMOP_MNEMONIC(fcompp_st0_stN, "fcompp st0,stN");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fcom_r80_by_r80);
}
9350
9351
/** Opcode 0xde 11/4.
 * FSUBRP ST(i),ST0 - reversed subtract and pop. */
FNIEMOP_DEF_1(iemOp_fsubrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubrp_stN_st0, "fsubrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsubr_r80_by_r80);
}
9358
9359
/** Opcode 0xde 11/5.
 * FSUBP ST(i),ST0 - subtract and pop. */
FNIEMOP_DEF_1(iemOp_fsubp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubp_stN_st0, "fsubp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsub_r80_by_r80);
}
9366
9367
/** Opcode 0xde 11/6.
 * FDIVRP ST(i),ST0 - reversed divide and pop. */
FNIEMOP_DEF_1(iemOp_fdivrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivrp_stN_st0, "fdivrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdivr_r80_by_r80);
}
9374
9375
/** Opcode 0xde 11/7.
 * FDIVP ST(i),ST0 - divide and pop. */
FNIEMOP_DEF_1(iemOp_fdivp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivp_stN_st0, "fdivp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdiv_r80_by_r80);
}
9382
9383
/**
 * Common worker for FPU instructions working on ST0 and an m16i, and storing
 * the result in ST0.
 *
 * @param   bRm         The ModR/M byte (memory form, mod != 3).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m16i, uint8_t, bRm, PFNIEMAIMPLFPUI16, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(int16_t,               i16Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Only call the worker when ST(0) holds a value; empty reg underflows. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi16Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9419
9420
/** Opcode 0xde !11/0.
 * FIADD m16int - ST0 += (int16 at mem). */
FNIEMOP_DEF_1(iemOp_fiadd_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m16i, "fiadd m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fiadd_r80_by_i16);
}
9427
9428
/** Opcode 0xde !11/1.
 * FIMUL m16int - ST0 *= (int16 at mem). */
FNIEMOP_DEF_1(iemOp_fimul_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m16i, "fimul m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fimul_r80_by_i16);
}
9435
9436
/** Opcode 0xde !11/2.
 * FICOM m16int - compare ST0 with the int16 memory operand (no pop). */
FNIEMOP_DEF_1(iemOp_ficom_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m16i, "ficom st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(int16_t,               i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Compare only updates FSW (condition codes); underflow if ST0 empty. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9469
9470
/** Opcode 0xde !11/3.
 * FICOMP m16int - compare ST0 with the int16 memory operand, then pop. */
FNIEMOP_DEF_1(iemOp_ficomp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m16i, "ficomp st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(int16_t,               i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Same as FICOM, but the FSW update variants also pop the stack. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9503
9504
/** Opcode 0xde !11/4.
 * FISUB m16int - ST0 -= (int16 at mem). */
FNIEMOP_DEF_1(iemOp_fisub_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m16i, "fisub m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisub_r80_by_i16);
}
9511
9512
/** Opcode 0xde !11/5.
 * FISUBR m16int - ST0 = (int16 at mem) - ST0. */
FNIEMOP_DEF_1(iemOp_fisubr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m16i, "fisubr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisubr_r80_by_i16);
}
9519
9520
/** Opcode 0xde !11/6.
 * FIDIV m16int - ST0 /= (int16 at mem). */
FNIEMOP_DEF_1(iemOp_fidiv_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m16i, "fidiv m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidiv_r80_by_i16);
}
9527
9528
/** Opcode 0xde !11/7.
 * FIDIVR m16int - ST0 = (int16 at mem) / ST0. */
FNIEMOP_DEF_1(iemOp_fidivr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m16i, "fidivr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidivr_r80_by_i16);
}
9535
9536
/**
 * @opcode 0xde
 *
 * FPU escape 0xde decoder: register forms (mod=3) vs m16int memory forms,
 * dispatched on the ModR/M reg field.
 */
FNIEMOP_DEF(iemOp_EscF6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the 11-bit FPU opcode (FOP) so FNSAVE/FNSTENV can report it. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xde & 0x7);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_faddp_stN_st0, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmulp_stN_st0, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 3: if (bRm == 0xd9)    /* only DE D9 is FCOMPP; other rm values are invalid */
                        return FNIEMOP_CALL(iemOp_fcompp);
                    return IEMOP_RAISE_INVALID_OPCODE();
            case 4: return FNIEMOP_CALL_1(iemOp_fsubrp_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubp_stN_st0, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivrp_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivp_stN_st0, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m16i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m16i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m16i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m16i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m16i, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m16i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m16i, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m16i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9577
9578
/** Opcode 0xdf 11/0.
 * Undocumented instruction, assumed to work like ffree + fincstp. */
FNIEMOP_DEF_1(iemOp_ffreep_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffreep_stN, "ffreep stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /* Free the register, then "pop" by incrementing TOP (ffree + fincstp). */
    IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9600
9601
/** Opcode 0xdf 0xe0.
 * FNSTSW AX - store the FPU status word into AX (no pending-exception
 * check: the 'N' form does not wait). */
FNIEMOP_DEF(iemOp_fnstsw_ax)
{
    IEMOP_MNEMONIC(fnstsw_ax, "fnstsw ax");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9618
9619
/** Opcode 0xdf 11/5.
 * FUCOMIP ST0,ST(i) - unordered compare into EFLAGS, then pop.
 * NOTE(review): this passes the same iemAImpl_fcomi_r80_by_r80 worker as
 * FCOMIP below; real FUCOMIP does not raise #IA on QNaN operands while
 * FCOMIP does -- verify whether a dedicated fucomi worker should be used. */
FNIEMOP_DEF_1(iemOp_fucomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomip_st0_stN, "fucomip st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
}
9626
9627
/** Opcode 0xdf 11/6.
 * FCOMIP ST0,ST(i) - ordered compare into EFLAGS, then pop. */
FNIEMOP_DEF_1(iemOp_fcomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomip_st0_stN, "fcomip st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
}
9634
9635
/** Opcode 0xdf !11/0.
 * FILD m16int - push the int16 memory operand onto the FPU stack as r80. */
FNIEMOP_DEF_1(iemOp_fild_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m16i, "fild m16i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(int16_t,               i16Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val,  i16Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Register 7 is the one that becomes ST(0) after the push; it must be
       empty or we signal stack (push) overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i16_to_r80, pFpuRes, pi16Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9667
9668
/** Opcode 0xdf !11/1.
 * FISTTP m16int - store ST(0) to memory as int16 with truncation, then pop
 * (SSE3). */
FNIEMOP_DEF_1(iemOp_fisttp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m16i, "fisttp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int16_t *,               pi16Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing before touching FPU state. */
    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Underflow: with invalid-op masked, store the integer indefinite. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9703
9704
/** Opcode 0xdf !11/2.
 * FIST m16int - store ST(0) to memory as int16 (rounded per FCW), no pop. */
FNIEMOP_DEF_1(iemOp_fist_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m16i, "fist m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int16_t *,               pi16Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Underflow: with invalid-op masked, store the integer indefinite. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9739
9740
/** Opcode 0xdf !11/3.
 * FISTP m16int - store ST(0) to memory as int16 (rounded per FCW), then pop. */
FNIEMOP_DEF_1(iemOp_fistp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m16i, "fistp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int16_t *,               pi16Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Underflow: with invalid-op masked, store the integer indefinite. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9775
9776
/** Opcode 0xdf !11/4.
 * FBLD m80bcd - load packed BCD; not implemented (stub raises/asserts).
 * NOTE(review): the '_m80d' suffix is misleading; the operand is 80-bit BCD. */
FNIEMOP_STUB_1(iemOp_fbld_m80d, uint8_t, bRm);
9779
9780
/** Opcode 0xdf !11/5.
 * FILD m64int - push the int64 memory operand onto the FPU stack as r80. */
FNIEMOP_DEF_1(iemOp_fild_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m64i, "fild m64i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(int64_t,               i64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int64_t const *, pi64Val,  i64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I64(i64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Register 7 becomes ST(0) after the push; it must be empty or we
       signal stack (push) overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i64_to_r80, pFpuRes, pi64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9812
9813
/** Opcode 0xdf !11/6.
 * FBSTP m80bcd - store as packed BCD and pop; not implemented (stub).
 * NOTE(review): the '_m80d' suffix is misleading; the operand is 80-bit BCD. */
FNIEMOP_STUB_1(iemOp_fbstp_m80d, uint8_t, bRm);
9816
9817
/** Opcode 0xdf !11/7.
 * FISTP m64int - store ST(0) to memory as int64 (rounded per FCW), then pop. */
FNIEMOP_DEF_1(iemOp_fistp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m64i, "fistp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int64_t *,               pi64Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Underflow: with invalid-op masked, store the integer indefinite. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9852
9853
9854/**
9855 * @opcode 0xdf
9856 */
9857FNIEMOP_DEF(iemOp_EscF7)
9858{
9859 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9860 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9861 {
9862 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
9863 {
9864 case 0: return FNIEMOP_CALL_1(iemOp_ffreep_stN, bRm); /* ffree + pop afterwards, since forever according to AMD. */
9865 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, behaves like FXCH ST(i) on intel. */
9866 case 2: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
9867 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
9868 case 4: if (bRm == 0xe0)
9869 return FNIEMOP_CALL(iemOp_fnstsw_ax);
9870 return IEMOP_RAISE_INVALID_OPCODE();
9871 case 5: return FNIEMOP_CALL_1(iemOp_fucomip_st0_stN, bRm);
9872 case 6: return FNIEMOP_CALL_1(iemOp_fcomip_st0_stN, bRm);
9873 case 7: return IEMOP_RAISE_INVALID_OPCODE();
9874 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9875 }
9876 }
9877 else
9878 {
9879 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
9880 {
9881 case 0: return FNIEMOP_CALL_1(iemOp_fild_m16i, bRm);
9882 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m16i, bRm);
9883 case 2: return FNIEMOP_CALL_1(iemOp_fist_m16i, bRm);
9884 case 3: return FNIEMOP_CALL_1(iemOp_fistp_m16i, bRm);
9885 case 4: return FNIEMOP_CALL_1(iemOp_fbld_m80d, bRm);
9886 case 5: return FNIEMOP_CALL_1(iemOp_fild_m64i, bRm);
9887 case 6: return FNIEMOP_CALL_1(iemOp_fbstp_m80d, bRm);
9888 case 7: return FNIEMOP_CALL_1(iemOp_fistp_m64i, bRm);
9889 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9890 }
9891 }
9892}
9893
9894
/**
 * @opcode 0xe0
 *
 * LOOPNE/LOOPNZ Jb - decrement rCX (width per effective address size) and
 * take the short branch when the counter is non-zero AND ZF is clear.
 */
FNIEMOP_DEF(iemOp_loopne_Jb)
{
    IEMOP_MNEMONIC(loopne_Jb, "loopne Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* The address-size prefix selects CX vs ECX vs RCX as the counter. */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
9943
9944
/**
 * @opcode 0xe1
 *
 * LOOPE/LOOPZ Jb - decrement rCX (width per effective address size) and
 * take the short branch when the counter is non-zero AND ZF is set.
 */
FNIEMOP_DEF(iemOp_loope_Jb)
{
    IEMOP_MNEMONIC(loope_Jb, "loope Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* The address-size prefix selects CX vs ECX vs RCX as the counter. */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
9993
9994
/**
 * @opcode 0xe2
 *
 * LOOP Jb - decrement rCX and branch while non-zero.  A branch target equal
 * to the instruction's own start (i8Imm == -instr-length, i.e. "loop $")
 * is a busy-wait delay loop; it is short-circuited by zeroing rCX and
 * falling through in a single step.
 */
FNIEMOP_DEF(iemOp_loop_Jb)
{
    IEMOP_MNEMONIC(loop_Jb, "loop Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /** @todo Check out the #GP case if EIP < CS.Base or EIP > CS.Limit when
     * using the 32-bit operand size override.  How can that be restarted?  See
     * weird pseudo code in intel manual. */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            if (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) != i8Imm)
            {
                IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
                IEM_MC_IF_CX_IS_NZ() {
                    IEM_MC_REL_JMP_S8(i8Imm);
                } IEM_MC_ELSE() {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ENDIF();
            }
            else
            {
                /* Self-branching "loop $": consume the whole count at once. */
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP();
            }
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            if (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) != i8Imm)
            {
                IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
                IEM_MC_IF_ECX_IS_NZ() {
                    IEM_MC_REL_JMP_S8(i8Imm);
                } IEM_MC_ELSE() {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ENDIF();
            }
            else
            {
                /* Self-branching "loop $": consume the whole count at once. */
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP();
            }
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            if (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) != i8Imm)
            {
                IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
                IEM_MC_IF_RCX_IS_NZ() {
                    IEM_MC_REL_JMP_S8(i8Imm);
                } IEM_MC_ELSE() {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ENDIF();
            }
            else
            {
                /* Self-branching "loop $": consume the whole count at once. */
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP();
            }
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10070
10071
/**
 * @opcode 0xe3
 *
 * JCXZ/JECXZ/JRCXZ Jb - take the short branch when the address-size-selected
 * counter register (CX/ECX/RCX) is zero; the counter is not modified.
 */
FNIEMOP_DEF(iemOp_jecxz_Jb)
{
    IEMOP_MNEMONIC(jecxz_Jb, "jecxz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_CX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_ECX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_RCX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10117
10118
/** Opcode 0xe4
 * IN AL,Ib - read one byte from the immediate I/O port into AL. */
FNIEMOP_DEF(iemOp_in_AL_Ib)
{
    IEMOP_MNEMONIC(in_AL_Ib, "in AL,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, 1);
}
10127
10128
/** Opcode 0xe5
 * IN eAX,Ib - read a word/dword (per operand size) from the immediate
 * I/O port into AX/EAX. */
FNIEMOP_DEF(iemOp_in_eAX_Ib)
{
    IEMOP_MNEMONIC(in_eAX_Ib, "in eAX,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
10137
10138
/** Opcode 0xe6
 * OUT Ib,AL - write AL to the immediate I/O port. */
FNIEMOP_DEF(iemOp_out_Ib_AL)
{
    IEMOP_MNEMONIC(out_Ib_AL, "out Ib,AL");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, 1);
}
10147
10148
/** Opcode 0xe7
 * OUT Ib,eAX - write AX/EAX (per operand size) to the immediate I/O port. */
FNIEMOP_DEF(iemOp_out_Ib_eAX)
{
    IEMOP_MNEMONIC(out_Ib_eAX, "out Ib,eAX");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
10157
10158
/**
 * @opcode 0xe8
 *
 * CALL Jv - near relative call; immediate width follows the effective
 * operand size (sign-extended 32-bit displacement in 64-bit mode).
 */
FNIEMOP_DEF(iemOp_call_Jv)
{
    IEMOP_MNEMONIC(call_Jv, "call Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_16, (int16_t)u16Imm);
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_32, (int32_t)u32Imm);
        }

        case IEMMODE_64BIT:
        {
            /* 64-bit mode still encodes a 32-bit displacement, sign extended. */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_64, u64Imm);
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10189
10190
/**
 * @opcode 0xe9
 *
 * JMP Jv - near relative jump; 16-bit displacement with the operand-size
 * override, otherwise a 32-bit displacement (also in 64-bit mode).
 */
FNIEMOP_DEF(iemOp_jmp_Jv)
{
    IEMOP_MNEMONIC(jmp_Jv, "jmp Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
            IEM_MC_BEGIN(0, 0);
            IEM_MC_REL_JMP_S16(i16Imm);
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        case IEMMODE_32BIT:
        {
            int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
            IEM_MC_BEGIN(0, 0);
            IEM_MC_REL_JMP_S32(i32Imm);
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10222
10223
/**
 * @opcode 0xea
 *
 * JMP Ap - direct far jump via an immediate ptr16:16 / ptr16:32 pointer.
 * Invalid (#UD) in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_jmp_Ap)
{
    IEMOP_MNEMONIC(jmp_Ap, "jmp Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    uint32_t offSeg;
    if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&offSeg);
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
    uint16_t uSel;  IEM_OPCODE_GET_NEXT_U16(&uSel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_FarJmp, uSel, offSeg, pVCpu->iem.s.enmEffOpSize);
}
10242
10243
/**
 * @opcode 0xeb
 *
 * JMP Jb - short relative jump (8-bit signed displacement).
 */
FNIEMOP_DEF(iemOp_jmp_Jb)
{
    IEMOP_MNEMONIC(jmp_Jb, "jmp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_REL_JMP_S8(i8Imm);
    IEM_MC_END();
    return VINF_SUCCESS;
}
10259
10260
/** Opcode 0xec
 * IN AL,DX - read one byte from the I/O port in DX into AL. */
FNIEMOP_DEF(iemOp_in_AL_DX)
{
    IEMOP_MNEMONIC(in_AL_DX, "in AL,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, 1);
}
10268
10269
/** Opcode 0xed
 * IN eAX,DX - read a word/dword (per operand size) from the port in DX.
 * NOTE(review): the function name lacks the 'in_' prefix used by every
 * sibling here (iemOp_in_AL_DX etc.); renaming would touch the opcode
 * table elsewhere, so it is only flagged. */
FNIEMOP_DEF(iemOp_eAX_DX)
{
    IEMOP_MNEMONIC(in_eAX_DX, "in eAX,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
10277
10278
/** Opcode 0xee
 * OUT DX,AL - write AL to the I/O port in DX. */
FNIEMOP_DEF(iemOp_out_DX_AL)
{
    IEMOP_MNEMONIC(out_DX_AL, "out DX,AL");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, 1);
}
10286
10287
/** Opcode 0xef
 * OUT DX,eAX - write AX/EAX (per operand size) to the I/O port in DX. */
FNIEMOP_DEF(iemOp_out_DX_eAX)
{
    IEMOP_MNEMONIC(out_DX_eAX, "out DX,eAX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
10295
10296
/**
 * @opcode 0xf0
 *
 * LOCK prefix: records the prefix and recursively decodes the next opcode byte.
 */
FNIEMOP_DEF(iemOp_lock)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("lock");  /* A REX prefix before LOCK is void. */
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_LOCK;

    /* Continue decoding with the following opcode byte. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
10308
10309
/**
 * @opcode 0xf1
 *
 * INT1/ICEBP: raises a \#DB style interrupt (vector 1) via the common INT
 * implementation, flagged as not being the INT3 instruction.
 */
FNIEMOP_DEF(iemOp_int1)
{
    IEMOP_MNEMONIC(int1, "int1"); /* icebp */
    IEMOP_HLP_MIN_386(); /** @todo does not generate #UD on 286, or so they say... */
    /** @todo testcase! */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_DB, false /*fIsBpInstr*/);
}
10320
10321
/**
 * @opcode 0xf2
 *
 * REPNE/REPNZ prefix: records the prefix and recursively decodes the next
 * opcode byte.
 */
FNIEMOP_DEF(iemOp_repne)
{
    /* This overrides any previous REPE prefix. */
    pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPZ;
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repne");  /* A REX prefix before REPNE is void. */
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPNZ;

    /* For the 4 entry opcode tables, REPNZ overrides any previous
       REPZ and operand size prefixes. */
    pVCpu->iem.s.idxPrefix = 3;

    /* Continue decoding with the following opcode byte. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
10339
10340
/**
 * @opcode 0xf3
 *
 * REP/REPE/REPZ prefix: records the prefix and recursively decodes the next
 * opcode byte.
 */
FNIEMOP_DEF(iemOp_repe)
{
    /* This overrides any previous REPNE prefix. */
    pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPNZ;
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repe");  /* A REX prefix before REPE is void. */
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPZ;

    /* For the 4 entry opcode tables, REPZ overrides any previous
       REPNZ and operand size prefixes. */
    pVCpu->iem.s.idxPrefix = 2;

    /* Continue decoding with the following opcode byte. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
10358
10359
/**
 * @opcode 0xf4
 *
 * HLT: defers to the C implementation (privilege checks and halting live there).
 */
FNIEMOP_DEF(iemOp_hlt)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_hlt);
}
10368
10369
/**
 * @opcode 0xf5
 *
 * CMC: complements (toggles) the carry flag.
 */
FNIEMOP_DEF(iemOp_cmc)
{
    IEMOP_MNEMONIC(cmc, "cmc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_FLIP_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10383
10384
/**
 * Common implementation of 'inc/dec/not/neg Eb'.
 *
 * Dispatches on the mod field: register operands call the normal byte worker
 * directly, memory operands are mapped read-write and support the LOCK prefix
 * via the locked worker variant.
 *
 * @param   bRm     The RM byte.
 * @param   pImpl   The instruction implementation (supplies pfnNormalU8 and
 *                  pfnLockedU8).
 */
FNIEMOP_DEF_2(iemOpCommonUnaryEb, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
{
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint32_t *, pEFlags, 1);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);  /* no immediate follows (cbImm=0) */
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        /* LOCK prefix selects the atomic worker variant. */
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU8, pu8Dst, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
10428
10429
/**
 * Common implementation of 'inc/dec/not/neg Ev'.
 *
 * Register operands are routed to the common GReg worker; memory operands are
 * mapped read-write per effective operand size, with LOCK prefix support via
 * the locked worker variants.
 *
 * @param   bRm     The RM byte.
 * @param   pImpl   The instruction implementation (supplies the normal and
 *                  locked workers for the 16/32/64-bit operand sizes).
 */
FNIEMOP_DEF_2(iemOpCommonUnaryEv, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
{
    /* Registers are handled by a common worker. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, pImpl, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);

    /* Memory we do here. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 2);
            IEM_MC_ARG(uint16_t *, pu16Dst, 0);
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);  /* no immediate follows */
            IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
            IEM_MC_FETCH_EFLAGS(EFlags);
            if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
            else
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU16, pu16Dst, pEFlags);

            IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
            IEM_MC_COMMIT_EFLAGS(EFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 2);
            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
            IEM_MC_FETCH_EFLAGS(EFlags);
            if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
            else
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU32, pu32Dst, pEFlags);

            IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
            IEM_MC_COMMIT_EFLAGS(EFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 2);
            IEM_MC_ARG(uint64_t *, pu64Dst, 0);
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
            IEM_MC_FETCH_EFLAGS(EFlags);
            if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
            else
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU64, pu64Dst, pEFlags);

            IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
            IEM_MC_COMMIT_EFLAGS(EFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10508
10509
/** Opcode 0xf6 /0 - 'test Eb,Ib'.
 *
 * AND-without-writeback of an 8-bit immediate against r/m8; only the flags
 * are updated (the memory operand is mapped read-only).
 */
FNIEMOP_DEF_1(iemOp_grp3_test_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC(test_Eb_Ib, "test Eb,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/u8Imm, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, u8Src, 1);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);  /* one immediate byte follows the ModR/M bytes */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_ASSIGN(u8Src, u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Read-only mapping: test never writes its destination. */
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_R);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
10556
10557
/** Opcode 0xf7 /0 - 'test Ev,Iv'.
 *
 * AND-without-writeback of a word/dword/sign-extended-dword immediate against
 * r/m16/32/64; only the flags are updated (memory operands mapped read-only).
 */
FNIEMOP_DEF_1(iemOp_grp3_test_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(test_Ev_Iv, "test Ev,Iv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/u16Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/u32Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
                /* No clearing the high dword here - test doesn't write back the result. */
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                /* 64-bit form takes a 32-bit immediate, sign extended. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/u64Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);  /* 2 immediate bytes follow */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);  /* 4 immediate bytes follow */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);  /* 4 immediate bytes follow (imm32, sign extended) */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
10697
10698
/** Opcode 0xf6 /4, /5, /6 and /7 - common mul/imul/div/idiv Eb worker.
 *
 * The 8-bit multiply/divide forms operate on AX (AL/AH implicitly), hence the
 * pu16AX reference.  The assembly worker returns non-zero to request a
 * \#DE (divide error).
 *
 * @param   bRm     The RM byte.
 * @param   pfnU8   The 8-bit mul/imul/div/idiv assembly worker.
 */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEb, uint8_t, bRm, PFNIEMAIMPLMULDIVU8, pfnU8)
{
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_ARG(uint16_t *, pu16AX, 0);
        IEM_MC_ARG(uint8_t, u8Value, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(int32_t, rc);

        IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        /* Non-zero rc from the worker means divide error. */
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *, pu16AX, 0);
        IEM_MC_ARG(uint8_t, u8Value, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(int32_t, rc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
10750
10751
/** Opcode 0xf7 /4, /5, /6 and /7 - common mul/imul/div/idiv Ev worker.
 *
 * These forms use the rAX/rDX register pair implicitly.  The assembly worker
 * returns non-zero to request a \#DE (divide error).
 *
 * @param   bRm     The RM byte.
 * @param   pImpl   The instruction implementation (supplies the 16/32/64-bit
 *                  workers).
 */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEv, uint8_t, bRm, PCIEMOPMULDIVSIZES, pImpl)
{
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                /* NOTE(review): this repeats the decoding-done check already made
                   above the switch; appears redundant but harmless - confirm. */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint16_t *, pu16AX, 0);
                IEM_MC_ARG(uint16_t *, pu16DX, 1);
                IEM_MC_ARG(uint16_t, u16Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
                /* Non-zero rc from the worker means divide error. */
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint32_t *, pu32AX, 0);
                IEM_MC_ARG(uint32_t *, pu32DX, 1);
                IEM_MC_ARG(uint32_t, u32Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    /* 32-bit register writes zero the high dword of the 64-bit register. */
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint64_t *, pu64AX, 0);
                IEM_MC_ARG(uint64_t *, pu64DX, 1);
                IEM_MC_ARG(uint64_t, u64Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint16_t *, pu16AX, 0);
                IEM_MC_ARG(uint16_t *, pu16DX, 1);
                IEM_MC_ARG(uint16_t, u16Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint32_t *, pu32AX, 0);
                IEM_MC_ARG(uint32_t *, pu32DX, 1);
                IEM_MC_ARG(uint32_t, u32Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    /* 32-bit register writes zero the high dword of the 64-bit register. */
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint64_t *, pu64AX, 0);
                IEM_MC_ARG(uint64_t *, pu64DX, 1);
                IEM_MC_ARG(uint64_t, u64Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
10935
/**
 * @opcode 0xf6
 *
 * Group 3, byte forms: dispatches on the ModR/M reg field to
 * test/not/neg/mul/imul/div/idiv Eb (/1 is undefined).
 */
FNIEMOP_DEF(iemOp_Grp3_Eb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
        case 1:
/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2:
            IEMOP_MNEMONIC(not_Eb, "not Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_not);
        case 3:
            IEMOP_MNEMONIC(neg_Eb, "neg Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_neg);
        case 4:
            IEMOP_MNEMONIC(mul_Eb, "mul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_mul_u8);
        case 5:
            IEMOP_MNEMONIC(imul_Eb, "imul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_imul_u8);
        case 6:
            IEMOP_MNEMONIC(div_Eb, "div Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_div_u8);
        case 7:
            IEMOP_MNEMONIC(idiv_Eb, "idiv Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_idiv_u8);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10974
10975
/**
 * @opcode 0xf7
 *
 * Group 3, word/dword/qword forms: dispatches on the ModR/M reg field to
 * test/not/neg/mul/imul/div/idiv Ev (/1 is undefined).
 */
FNIEMOP_DEF(iemOp_Grp3_Ev)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
        case 1:
/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2:
            IEMOP_MNEMONIC(not_Ev, "not Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_not);
        case 3:
            IEMOP_MNEMONIC(neg_Ev, "neg Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_neg);
        case 4:
            IEMOP_MNEMONIC(mul_Ev, "mul Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_mul);
        case 5:
            IEMOP_MNEMONIC(imul_Ev, "imul Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_imul);
        case 6:
            IEMOP_MNEMONIC(div_Ev, "div Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_div);
        case 7:
            IEMOP_MNEMONIC(idiv_Ev, "idiv Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_idiv);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11014
11015
/**
 * @opcode 0xf8
 *
 * CLC: clears the carry flag.
 */
FNIEMOP_DEF(iemOp_clc)
{
    IEMOP_MNEMONIC(clc, "clc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
11029
11030
/**
 * @opcode 0xf9
 *
 * STC: sets the carry flag.
 */
FNIEMOP_DEF(iemOp_stc)
{
    IEMOP_MNEMONIC(stc, "stc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_SET_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
11044
11045
/**
 * @opcode 0xfa
 *
 * CLI: defers to the C implementation (IOPL/VME checks live there).
 */
FNIEMOP_DEF(iemOp_cli)
{
    IEMOP_MNEMONIC(cli, "cli");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cli);
}
11055
11056
/**
 * @opcode 0xfb
 *
 * STI: defers to the C implementation (IOPL/VME checks and the interrupt
 * shadow live there).
 */
FNIEMOP_DEF(iemOp_sti)
{
    IEMOP_MNEMONIC(sti, "sti");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sti);
}
11063
11064
/**
 * @opcode 0xfc
 *
 * CLD: clears the direction flag.
 */
FNIEMOP_DEF(iemOp_cld)
{
    IEMOP_MNEMONIC(cld, "cld");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
11078
11079
/**
 * @opcode 0xfd
 *
 * STD: sets the direction flag.
 */
FNIEMOP_DEF(iemOp_std)
{
    IEMOP_MNEMONIC(std, "std");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_SET_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
11093
11094
/**
 * @opcode 0xfe
 *
 * Group 4: inc/dec Eb via the reg field; /2../7 are undefined opcodes.
 */
FNIEMOP_DEF(iemOp_Grp4)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            IEMOP_MNEMONIC(inc_Eb, "inc Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_inc);
        case 1:
            IEMOP_MNEMONIC(dec_Eb, "dec Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_dec);
        default:
            IEMOP_MNEMONIC(grp4_ud, "grp4-ud");
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
11114
11115
/**
 * Opcode 0xff /2 - 'call Ev': indirect near call through register or memory.
 *
 * @param   bRm     The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_calln_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(calln_Ev, "calln Ev");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* Near branches default to 64-bit operand size in long mode. */

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* The new RIP is taken from a register. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from a memory location. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
11200
11201typedef IEM_CIMPL_DECL_TYPE_3(FNIEMCIMPLFARBRANCH, uint16_t, uSel, uint64_t, offSeg, IEMMODE, enmOpSize);
11202
11203FNIEMOP_DEF_2(iemOpHlp_Grp5_far_Ep, uint8_t, bRm, FNIEMCIMPLFARBRANCH *, pfnCImpl)
11204{
11205 /* Registers? How?? */
11206 if (RT_LIKELY((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)))
11207 { /* likely */ }
11208 else
11209 return IEMOP_RAISE_INVALID_OPCODE(); /* callf eax is not legal */
11210
11211 /* Far pointer loaded from memory. */
11212 switch (pVCpu->iem.s.enmEffOpSize)
11213 {
11214 case IEMMODE_16BIT:
11215 IEM_MC_BEGIN(3, 1);
11216 IEM_MC_ARG(uint16_t, u16Sel, 0);
11217 IEM_MC_ARG(uint16_t, offSeg, 1);
11218 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
11219 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11220 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11221 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11222 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11223 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2);
11224 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
11225 IEM_MC_END();
11226 return VINF_SUCCESS;
11227
11228 case IEMMODE_64BIT:
11229 /** @todo testcase: AMD does not seem to believe in the case (see bs-cpu-xcpt-1)
11230 * and will apparently ignore REX.W, at least for the jmp far qword [rsp]
11231 * and call far qword [rsp] encodings. */
11232 if (!IEM_IS_GUEST_CPU_AMD(pVCpu))
11233 {
11234 IEM_MC_BEGIN(3, 1);
11235 IEM_MC_ARG(uint16_t, u16Sel, 0);
11236 IEM_MC_ARG(uint64_t, offSeg, 1);
11237 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
11238 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11239 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11240 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11241 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11242 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 8);
11243 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
11244 IEM_MC_END();
11245 return VINF_SUCCESS;
11246 }
11247 /* AMD falls thru. */
11248 /* fall thru */
11249
11250 case IEMMODE_32BIT:
11251 IEM_MC_BEGIN(3, 1);
11252 IEM_MC_ARG(uint16_t, u16Sel, 0);
11253 IEM_MC_ARG(uint32_t, offSeg, 1);
11254 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_32BIT, 2);
11255 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11256 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11257 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11258 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11259 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4);
11260 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
11261 IEM_MC_END();
11262 return VINF_SUCCESS;
11263
11264 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11265 }
11266}
11267
11268
11269/**
11270 * Opcode 0xff /3.
11271 * @param bRm The RM byte.
11272 */
11273FNIEMOP_DEF_1(iemOp_Grp5_callf_Ep, uint8_t, bRm)
11274{
11275 IEMOP_MNEMONIC(callf_Ep, "callf Ep");
11276 return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_callf);
11277}
11278
11279
11280/**
11281 * Opcode 0xff /4.
11282 * @param bRm The RM byte.
11283 */
11284FNIEMOP_DEF_1(iemOp_Grp5_jmpn_Ev, uint8_t, bRm)
11285{
11286 IEMOP_MNEMONIC(jmpn_Ev, "jmpn Ev");
11287 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
11288
11289 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
11290 {
11291 /* The new RIP is taken from a register. */
11292 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11293 switch (pVCpu->iem.s.enmEffOpSize)
11294 {
11295 case IEMMODE_16BIT:
11296 IEM_MC_BEGIN(0, 1);
11297 IEM_MC_LOCAL(uint16_t, u16Target);
11298 IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11299 IEM_MC_SET_RIP_U16(u16Target);
11300 IEM_MC_END()
11301 return VINF_SUCCESS;
11302
11303 case IEMMODE_32BIT:
11304 IEM_MC_BEGIN(0, 1);
11305 IEM_MC_LOCAL(uint32_t, u32Target);
11306 IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11307 IEM_MC_SET_RIP_U32(u32Target);
11308 IEM_MC_END()
11309 return VINF_SUCCESS;
11310
11311 case IEMMODE_64BIT:
11312 IEM_MC_BEGIN(0, 1);
11313 IEM_MC_LOCAL(uint64_t, u64Target);
11314 IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11315 IEM_MC_SET_RIP_U64(u64Target);
11316 IEM_MC_END()
11317 return VINF_SUCCESS;
11318
11319 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11320 }
11321 }
11322 else
11323 {
11324 /* The new RIP is taken from a memory location. */
11325 switch (pVCpu->iem.s.enmEffOpSize)
11326 {
11327 case IEMMODE_16BIT:
11328 IEM_MC_BEGIN(0, 2);
11329 IEM_MC_LOCAL(uint16_t, u16Target);
11330 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11331 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11332 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11333 IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11334 IEM_MC_SET_RIP_U16(u16Target);
11335 IEM_MC_END()
11336 return VINF_SUCCESS;
11337
11338 case IEMMODE_32BIT:
11339 IEM_MC_BEGIN(0, 2);
11340 IEM_MC_LOCAL(uint32_t, u32Target);
11341 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11342 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11343 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11344 IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11345 IEM_MC_SET_RIP_U32(u32Target);
11346 IEM_MC_END()
11347 return VINF_SUCCESS;
11348
11349 case IEMMODE_64BIT:
11350 IEM_MC_BEGIN(0, 2);
11351 IEM_MC_LOCAL(uint64_t, u64Target);
11352 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11353 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11354 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11355 IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11356 IEM_MC_SET_RIP_U64(u64Target);
11357 IEM_MC_END()
11358 return VINF_SUCCESS;
11359
11360 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11361 }
11362 }
11363}
11364
11365
11366/**
11367 * Opcode 0xff /5.
11368 * @param bRm The RM byte.
11369 */
11370FNIEMOP_DEF_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm)
11371{
11372 IEMOP_MNEMONIC(jmpf_Ep, "jmpf Ep");
11373 return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_FarJmp);
11374}
11375
11376
11377/**
11378 * Opcode 0xff /6.
11379 * @param bRm The RM byte.
11380 */
11381FNIEMOP_DEF_1(iemOp_Grp5_push_Ev, uint8_t, bRm)
11382{
11383 IEMOP_MNEMONIC(push_Ev, "push Ev");
11384
11385 /* Registers are handled by a common worker. */
11386 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
11387 return FNIEMOP_CALL_1(iemOpCommonPushGReg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11388
11389 /* Memory we do here. */
11390 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
11391 switch (pVCpu->iem.s.enmEffOpSize)
11392 {
11393 case IEMMODE_16BIT:
11394 IEM_MC_BEGIN(0, 2);
11395 IEM_MC_LOCAL(uint16_t, u16Src);
11396 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11397 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11398 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11399 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11400 IEM_MC_PUSH_U16(u16Src);
11401 IEM_MC_ADVANCE_RIP();
11402 IEM_MC_END();
11403 return VINF_SUCCESS;
11404
11405 case IEMMODE_32BIT:
11406 IEM_MC_BEGIN(0, 2);
11407 IEM_MC_LOCAL(uint32_t, u32Src);
11408 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11409 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11410 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11411 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11412 IEM_MC_PUSH_U32(u32Src);
11413 IEM_MC_ADVANCE_RIP();
11414 IEM_MC_END();
11415 return VINF_SUCCESS;
11416
11417 case IEMMODE_64BIT:
11418 IEM_MC_BEGIN(0, 2);
11419 IEM_MC_LOCAL(uint64_t, u64Src);
11420 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11421 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11422 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11423 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11424 IEM_MC_PUSH_U64(u64Src);
11425 IEM_MC_ADVANCE_RIP();
11426 IEM_MC_END();
11427 return VINF_SUCCESS;
11428
11429 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11430 }
11431}
11432
11433
11434/**
11435 * @opcode 0xff
11436 */
11437FNIEMOP_DEF(iemOp_Grp5)
11438{
11439 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11440 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
11441 {
11442 case 0:
11443 IEMOP_MNEMONIC(inc_Ev, "inc Ev");
11444 return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_inc);
11445 case 1:
11446 IEMOP_MNEMONIC(dec_Ev, "dec Ev");
11447 return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_dec);
11448 case 2:
11449 return FNIEMOP_CALL_1(iemOp_Grp5_calln_Ev, bRm);
11450 case 3:
11451 return FNIEMOP_CALL_1(iemOp_Grp5_callf_Ep, bRm);
11452 case 4:
11453 return FNIEMOP_CALL_1(iemOp_Grp5_jmpn_Ev, bRm);
11454 case 5:
11455 return FNIEMOP_CALL_1(iemOp_Grp5_jmpf_Ep, bRm);
11456 case 6:
11457 return FNIEMOP_CALL_1(iemOp_Grp5_push_Ev, bRm);
11458 case 7:
11459 IEMOP_MNEMONIC(grp5_ud, "grp5-ud");
11460 return IEMOP_RAISE_INVALID_OPCODE();
11461 }
11462 AssertFailedReturn(VERR_IEM_IPE_3);
11463}
11464
11465
11466
/**
 * The one-byte opcode dispatch table.
 *
 * Indexed directly by the opcode byte (0x00..0xff); each entry is the
 * FNIEMOP decoder for that opcode.  Entry order is semantic - do not
 * reorder.  Declared extern at the top of the file so it can be forward
 * referenced.
 */
const PFNIEMOP g_apfnOneByteMap[256] =
{
    /* 0x00 */ iemOp_add_Eb_Gb, iemOp_add_Ev_Gv, iemOp_add_Gb_Eb, iemOp_add_Gv_Ev,
    /* 0x04 */ iemOp_add_Al_Ib, iemOp_add_eAX_Iz, iemOp_push_ES, iemOp_pop_ES,
    /* 0x08 */ iemOp_or_Eb_Gb, iemOp_or_Ev_Gv, iemOp_or_Gb_Eb, iemOp_or_Gv_Ev,
    /* 0x0c */ iemOp_or_Al_Ib, iemOp_or_eAX_Iz, iemOp_push_CS, iemOp_2byteEscape,
    /* 0x10 */ iemOp_adc_Eb_Gb, iemOp_adc_Ev_Gv, iemOp_adc_Gb_Eb, iemOp_adc_Gv_Ev,
    /* 0x14 */ iemOp_adc_Al_Ib, iemOp_adc_eAX_Iz, iemOp_push_SS, iemOp_pop_SS,
    /* 0x18 */ iemOp_sbb_Eb_Gb, iemOp_sbb_Ev_Gv, iemOp_sbb_Gb_Eb, iemOp_sbb_Gv_Ev,
    /* 0x1c */ iemOp_sbb_Al_Ib, iemOp_sbb_eAX_Iz, iemOp_push_DS, iemOp_pop_DS,
    /* 0x20 */ iemOp_and_Eb_Gb, iemOp_and_Ev_Gv, iemOp_and_Gb_Eb, iemOp_and_Gv_Ev,
    /* 0x24 */ iemOp_and_Al_Ib, iemOp_and_eAX_Iz, iemOp_seg_ES, iemOp_daa,
    /* 0x28 */ iemOp_sub_Eb_Gb, iemOp_sub_Ev_Gv, iemOp_sub_Gb_Eb, iemOp_sub_Gv_Ev,
    /* 0x2c */ iemOp_sub_Al_Ib, iemOp_sub_eAX_Iz, iemOp_seg_CS, iemOp_das,
    /* 0x30 */ iemOp_xor_Eb_Gb, iemOp_xor_Ev_Gv, iemOp_xor_Gb_Eb, iemOp_xor_Gv_Ev,
    /* 0x34 */ iemOp_xor_Al_Ib, iemOp_xor_eAX_Iz, iemOp_seg_SS, iemOp_aaa,
    /* 0x38 */ iemOp_cmp_Eb_Gb, iemOp_cmp_Ev_Gv, iemOp_cmp_Gb_Eb, iemOp_cmp_Gv_Ev,
    /* 0x3c */ iemOp_cmp_Al_Ib, iemOp_cmp_eAX_Iz, iemOp_seg_DS, iemOp_aas,
    /* 0x40 */ iemOp_inc_eAX, iemOp_inc_eCX, iemOp_inc_eDX, iemOp_inc_eBX,
    /* 0x44 */ iemOp_inc_eSP, iemOp_inc_eBP, iemOp_inc_eSI, iemOp_inc_eDI,
    /* 0x48 */ iemOp_dec_eAX, iemOp_dec_eCX, iemOp_dec_eDX, iemOp_dec_eBX,
    /* 0x4c */ iemOp_dec_eSP, iemOp_dec_eBP, iemOp_dec_eSI, iemOp_dec_eDI,
    /* 0x50 */ iemOp_push_eAX, iemOp_push_eCX, iemOp_push_eDX, iemOp_push_eBX,
    /* 0x54 */ iemOp_push_eSP, iemOp_push_eBP, iemOp_push_eSI, iemOp_push_eDI,
    /* 0x58 */ iemOp_pop_eAX, iemOp_pop_eCX, iemOp_pop_eDX, iemOp_pop_eBX,
    /* 0x5c */ iemOp_pop_eSP, iemOp_pop_eBP, iemOp_pop_eSI, iemOp_pop_eDI,
    /* 0x60 */ iemOp_pusha, iemOp_popa__mvex, iemOp_bound_Gv_Ma__evex, iemOp_arpl_Ew_Gw_movsx_Gv_Ev,
    /* 0x64 */ iemOp_seg_FS, iemOp_seg_GS, iemOp_op_size, iemOp_addr_size,
    /* 0x68 */ iemOp_push_Iz, iemOp_imul_Gv_Ev_Iz, iemOp_push_Ib, iemOp_imul_Gv_Ev_Ib,
    /* 0x6c */ iemOp_insb_Yb_DX, iemOp_inswd_Yv_DX, iemOp_outsb_Yb_DX, iemOp_outswd_Yv_DX,
    /* 0x70 */ iemOp_jo_Jb, iemOp_jno_Jb, iemOp_jc_Jb, iemOp_jnc_Jb,
    /* 0x74 */ iemOp_je_Jb, iemOp_jne_Jb, iemOp_jbe_Jb, iemOp_jnbe_Jb,
    /* 0x78 */ iemOp_js_Jb, iemOp_jns_Jb, iemOp_jp_Jb, iemOp_jnp_Jb,
    /* 0x7c */ iemOp_jl_Jb, iemOp_jnl_Jb, iemOp_jle_Jb, iemOp_jnle_Jb,
    /* 0x80 */ iemOp_Grp1_Eb_Ib_80, iemOp_Grp1_Ev_Iz, iemOp_Grp1_Eb_Ib_82, iemOp_Grp1_Ev_Ib,
    /* 0x84 */ iemOp_test_Eb_Gb, iemOp_test_Ev_Gv, iemOp_xchg_Eb_Gb, iemOp_xchg_Ev_Gv,
    /* 0x88 */ iemOp_mov_Eb_Gb, iemOp_mov_Ev_Gv, iemOp_mov_Gb_Eb, iemOp_mov_Gv_Ev,
    /* 0x8c */ iemOp_mov_Ev_Sw, iemOp_lea_Gv_M, iemOp_mov_Sw_Ev, iemOp_Grp1A__xop,
    /* 0x90 */ iemOp_nop, iemOp_xchg_eCX_eAX, iemOp_xchg_eDX_eAX, iemOp_xchg_eBX_eAX,
    /* 0x94 */ iemOp_xchg_eSP_eAX, iemOp_xchg_eBP_eAX, iemOp_xchg_eSI_eAX, iemOp_xchg_eDI_eAX,
    /* 0x98 */ iemOp_cbw, iemOp_cwd, iemOp_call_Ap, iemOp_wait,
    /* 0x9c */ iemOp_pushf_Fv, iemOp_popf_Fv, iemOp_sahf, iemOp_lahf,
    /* 0xa0 */ iemOp_mov_AL_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL, iemOp_mov_Ov_rAX,
    /* 0xa4 */ iemOp_movsb_Xb_Yb, iemOp_movswd_Xv_Yv, iemOp_cmpsb_Xb_Yb, iemOp_cmpswd_Xv_Yv,
    /* 0xa8 */ iemOp_test_AL_Ib, iemOp_test_eAX_Iz, iemOp_stosb_Yb_AL, iemOp_stoswd_Yv_eAX,
    /* 0xac */ iemOp_lodsb_AL_Xb, iemOp_lodswd_eAX_Xv, iemOp_scasb_AL_Xb, iemOp_scaswd_eAX_Xv,
    /* 0xb0 */ iemOp_mov_AL_Ib, iemOp_CL_Ib, iemOp_DL_Ib, iemOp_BL_Ib,
    /* 0xb4 */ iemOp_mov_AH_Ib, iemOp_CH_Ib, iemOp_DH_Ib, iemOp_BH_Ib,
    /* 0xb8 */ iemOp_eAX_Iv, iemOp_eCX_Iv, iemOp_eDX_Iv, iemOp_eBX_Iv,
    /* 0xbc */ iemOp_eSP_Iv, iemOp_eBP_Iv, iemOp_eSI_Iv, iemOp_eDI_Iv,
    /* 0xc0 */ iemOp_Grp2_Eb_Ib, iemOp_Grp2_Ev_Ib, iemOp_retn_Iw, iemOp_retn,
    /* 0xc4 */ iemOp_les_Gv_Mp__vex2, iemOp_lds_Gv_Mp__vex3, iemOp_Grp11_Eb_Ib, iemOp_Grp11_Ev_Iz,
    /* 0xc8 */ iemOp_enter_Iw_Ib, iemOp_leave, iemOp_retf_Iw, iemOp_retf,
    /* 0xcc */ iemOp_int3, iemOp_int_Ib, iemOp_into, iemOp_iret,
    /* 0xd0 */ iemOp_Grp2_Eb_1, iemOp_Grp2_Ev_1, iemOp_Grp2_Eb_CL, iemOp_Grp2_Ev_CL,
    /* 0xd4 */ iemOp_aam_Ib, iemOp_aad_Ib, iemOp_salc, iemOp_xlat,
    /* 0xd8 */ iemOp_EscF0, iemOp_EscF1, iemOp_EscF2, iemOp_EscF3,
    /* 0xdc */ iemOp_EscF4, iemOp_EscF5, iemOp_EscF6, iemOp_EscF7,
    /* 0xe0 */ iemOp_loopne_Jb, iemOp_loope_Jb, iemOp_loop_Jb, iemOp_jecxz_Jb,
    /* 0xe4 */ iemOp_in_AL_Ib, iemOp_in_eAX_Ib, iemOp_out_Ib_AL, iemOp_out_Ib_eAX,
    /* 0xe8 */ iemOp_call_Jv, iemOp_jmp_Jv, iemOp_jmp_Ap, iemOp_jmp_Jb,
    /* 0xec */ iemOp_in_AL_DX, iemOp_eAX_DX, iemOp_out_DX_AL, iemOp_out_DX_eAX,
    /* 0xf0 */ iemOp_lock, iemOp_int1, iemOp_repne, iemOp_repe,
    /* 0xf4 */ iemOp_hlt, iemOp_cmc, iemOp_Grp3_Eb, iemOp_Grp3_Ev,
    /* 0xf8 */ iemOp_clc, iemOp_stc, iemOp_cli, iemOp_sti,
    /* 0xfc */ iemOp_cld, iemOp_std, iemOp_Grp4, iemOp_Grp5,
};
11534
11535
11536/** @} */
11537
Note: See TracBrowser for help on using the repository browser.

© 2025 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette