VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsOneByte.cpp.h@ 65880

Last change on this file since 65880 was 65880, checked in by vboxsync, 8 years ago

IEM,DIS: Updates

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 375.1 KB
Line 
1/* $Id: IEMAllInstructionsOneByte.cpp.h 65880 2017-02-25 14:51:09Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 */
5
6/*
7 * Copyright (C) 2011-2016 Oracle Corporation
8 *
9 * This file is part of VirtualBox Open Source Edition (OSE), as
 * available from http://www.virtualbox.org. This file is free software;
11 * you can redistribute it and/or modify it under the terms of the GNU
12 * General Public License (GPL) as published by the Free Software
13 * Foundation, in version 2 as it comes in the "COPYING" file of the
14 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
15 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
16 */
17
18
19/*******************************************************************************
20* Global Variables *
21*******************************************************************************/
22extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */
23
/** @defgroup og_gen General
 * @{
 */

/** @defgroup og_gen_arith Arithmetic
 * @{
 */
/** @defgroup og_gen_arith_bin Binary numbers */
/** @defgroup og_gen_arith_dec Decimal numbers */
/** @} */
34
35
36
37/** @name One byte opcodes.
38 * @{
39 */
40
41/* Instruction specification format - work in progress: */
42
43/**
44 * @opcode 0x00
45 * @opmnemonic add
46 * @op1 rm:Eb
47 * @op2 reg:Gb
48 * @opmaps one
49 * @openc ModR/M
50 * @opflmodify of,sf,zf,af,pf,cf
51 * @ophints harmless ignores_op_size
52 * @opstats add_Eb_Gb
53 * @opgroup op_gen_arith_bin
54 * @optest op1=1 op2=1 -> op1=2 efl=of,sf,zf,af
55 */
56FNIEMOP_DEF(iemOp_add_Eb_Gb)
57{
58 IEMOP_MNEMONIC2(MR, ADD, add, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
59 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_add);
60}
61
62
63/**
64 * @opcode 0x01
65 * @opgroup op_gen_arith_bin
66 * @opflmodify of,sf,zf,af,pf,cf
67 */
68FNIEMOP_DEF(iemOp_add_Ev_Gv)
69{
70 IEMOP_MNEMONIC2(MR, ADD, add, Ev, Gv, DISOPTYPE_HARMLESS, 0);
71 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_add);
72}
73
74
75/**
76 * @opcode 0x02
77 * @opgroup op_gen_arith_bin
78 * @opflmodify of,sf,zf,af,pf,cf
79 */
80FNIEMOP_DEF(iemOp_add_Gb_Eb)
81{
82 IEMOP_MNEMONIC2(RM, ADD, add, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
83 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_add);
84}
85
86
87/**
88 * @opcode 0x03
89 * @opgroup op_gen_arith_bin
90 * @opflmodify of,sf,zf,af,pf,cf
91 */
92FNIEMOP_DEF(iemOp_add_Gv_Ev)
93{
94 IEMOP_MNEMONIC2(RM, ADD, add, Gv, Ev, DISOPTYPE_HARMLESS, 0);
95 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_add);
96}
97
98
99/**
100 * @opcode 0x04
101 * @opgroup op_gen_arith_bin
102 * @opflmodify of,sf,zf,af,pf,cf
103 */
104FNIEMOP_DEF(iemOp_add_Al_Ib)
105{
106 IEMOP_MNEMONIC2(FIXED, ADD, add, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
107 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_add);
108}
109
110
111/**
112 * @opcode 0x05
113 * @opgroup op_gen_arith_bin
114 * @opflmodify of,sf,zf,af,pf,cf
115 */
116FNIEMOP_DEF(iemOp_add_eAX_Iz)
117{
118 IEMOP_MNEMONIC2(FIXED, ADD, add, rAX, Iz, DISOPTYPE_HARMLESS, 0);
119 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_add);
120}
121
122
123/**
124 * @opcode 0x06
125 * @opgroup op_stack_sreg
126 */
127FNIEMOP_DEF(iemOp_push_ES)
128{
129 IEMOP_MNEMONIC1(FIXED, PUSH, push, ES, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0);
130 IEMOP_HLP_NO_64BIT();
131 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_ES);
132}
133
134
135/**
136 * @opcode 0x07
137 * @opgroup op_stack_sreg
138 */
139FNIEMOP_DEF(iemOp_pop_ES)
140{
141 IEMOP_MNEMONIC1(FIXED, POP, pop, ES, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0);
142 IEMOP_HLP_NO_64BIT();
143 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
144 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_ES, pVCpu->iem.s.enmEffOpSize);
145}
146
147
148/**
149 * @opcode 0x08
150 * @opgroup op_gen_arith_bin
151 * @opflmodify of,sf,zf,af,pf,cf
152 * @opflundef af
153 * @opflclear of,cf
154 */
155FNIEMOP_DEF(iemOp_or_Eb_Gb)
156{
157 IEMOP_MNEMONIC2(MR, OR, or, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
158 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
159 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_or);
160}
161
162
163/**
164 * @opcode 0x09
165 * @opgroup op_gen_arith_bin
166 * @opflmodify of,sf,zf,af,pf,cf
167 * @opflundef af
168 * @opflclear of,cf
169 */
170FNIEMOP_DEF(iemOp_or_Ev_Gv)
171{
172 IEMOP_MNEMONIC2(MR, OR, or, Ev, Gv, DISOPTYPE_HARMLESS, 0);
173 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
174 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_or);
175}
176
177
178/**
179 * @opcode 0x0a
180 * @opgroup op_gen_arith_bin
181 * @opflmodify of,sf,zf,af,pf,cf
182 * @opflundef af
183 * @opflclear of,cf
184 */
185FNIEMOP_DEF(iemOp_or_Gb_Eb)
186{
187 IEMOP_MNEMONIC2(RM, OR, or, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
188 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
189 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_or);
190}
191
192
193/**
194 * @opcode 0x0b
195 * @opgroup op_gen_arith_bin
196 * @opflmodify of,sf,zf,af,pf,cf
197 * @opflundef af
198 * @opflclear of,cf
199 */
200FNIEMOP_DEF(iemOp_or_Gv_Ev)
201{
202 IEMOP_MNEMONIC2(RM, OR, or, Gv, Ev, DISOPTYPE_HARMLESS, 0);
203 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
204 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_or);
205}
206
207
208/**
209 * @opcode 0x0c
210 * @opgroup op_gen_arith_bin
211 * @opflmodify of,sf,zf,af,pf,cf
212 * @opflundef af
213 * @opflclear of,cf
214 */
215FNIEMOP_DEF(iemOp_or_Al_Ib)
216{
217 IEMOP_MNEMONIC2(FIXED, OR, or, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
218 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
219 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_or);
220}
221
222
223/**
224 * @opcode 0x0d
225 * @opgroup op_gen_arith_bin
226 * @opflmodify of,sf,zf,af,pf,cf
227 * @opflundef af
228 * @opflclear of,cf
229 */
230FNIEMOP_DEF(iemOp_or_eAX_Iz)
231{
232 IEMOP_MNEMONIC2(FIXED, OR, or, rAX, Iz, DISOPTYPE_HARMLESS, 0);
233 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
234 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_or);
235}
236
237
238/**
239 * @opcode 0x0e
240 * @opgroup op_stack_sreg
241 */
242FNIEMOP_DEF(iemOp_push_CS)
243{
244 IEMOP_MNEMONIC1(FIXED, PUSH, push, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_INVALID_64, 0);
245 IEMOP_HLP_NO_64BIT();
246 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_CS);
247}
248
249
250/**
251 * @opcode 0x0f
252 * @opmnemonic EscTwo0f
253 * @openc two0f
254 * @opdisenum OP_2B_ESC
255 * @ophints harmless
256 * @opgroup op_escapes
257 */
258FNIEMOP_DEF(iemOp_2byteEscape)
259{
260#ifdef VBOX_STRICT
261 /* Sanity check the table the first time around. */
262 static bool s_fTested = false;
263 if (RT_LIKELY(s_fTested)) { /* likely */ }
264 else
265 {
266 s_fTested = true;
267 Assert(g_apfnTwoByteMap[0xbc * 4 + 0] == iemOp_bsf_Gv_Ev);
268 Assert(g_apfnTwoByteMap[0xbc * 4 + 1] == iemOp_bsf_Gv_Ev);
269 Assert(g_apfnTwoByteMap[0xbc * 4 + 2] == iemOp_tzcnt_Gv_Ev);
270 Assert(g_apfnTwoByteMap[0xbc * 4 + 3] == iemOp_bsf_Gv_Ev);
271 }
272#endif
273
274 if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_286))
275 {
276 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
277 IEMOP_HLP_MIN_286();
278 return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
279 }
280 /* @opdone */
281
282 /*
283 * On the 8086 this is a POP CS instruction.
284 * For the time being we don't specify this this.
285 */
286 IEMOP_MNEMONIC1(FIXED, POP, pop, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_INVALID_64, IEMOPHINT_SKIP_PYTHON);
287 IEMOP_HLP_NO_64BIT();
288 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
289 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_ES, pVCpu->iem.s.enmEffOpSize);
290}
291
292/**
293 * @opcode 0x10
294 * @opgroup op_gen_arith_bin
295 * @opfltest cf
296 * @opflmodify of,sf,zf,af,pf,cf
297 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=of,sf,zf,af
298 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=of,sf,zf,af
299 */
300FNIEMOP_DEF(iemOp_adc_Eb_Gb)
301{
302 IEMOP_MNEMONIC2(MR, ADC, adc, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
303 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_adc);
304}
305
306
307/**
308 * @opcode 0x11
309 * @opgroup op_gen_arith_bin
310 * @opfltest cf
311 * @opflmodify of,sf,zf,af,pf,cf
312 */
313FNIEMOP_DEF(iemOp_adc_Ev_Gv)
314{
315 IEMOP_MNEMONIC2(MR, ADC, adc, Ev, Gv, DISOPTYPE_HARMLESS, 0);
316 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_adc);
317}
318
319
320/**
321 * @opcode 0x12
322 * @opgroup op_gen_arith_bin
323 * @opfltest cf
324 * @opflmodify of,sf,zf,af,pf,cf
325 */
326FNIEMOP_DEF(iemOp_adc_Gb_Eb)
327{
328 IEMOP_MNEMONIC2(RM, ADC, adc, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
329 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_adc);
330}
331
332
333/**
334 * @opcode 0x13
335 * @opgroup op_gen_arith_bin
336 * @opfltest cf
337 * @opflmodify of,sf,zf,af,pf,cf
338 */
339FNIEMOP_DEF(iemOp_adc_Gv_Ev)
340{
341 IEMOP_MNEMONIC2(RM, ADC, adc, Gv, Ev, DISOPTYPE_HARMLESS, 0);
342 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_adc);
343}
344
345
346/**
347 * @opcode 0x14
348 * @opgroup op_gen_arith_bin
349 * @opfltest cf
350 * @opflmodify of,sf,zf,af,pf,cf
351 */
352FNIEMOP_DEF(iemOp_adc_Al_Ib)
353{
354 IEMOP_MNEMONIC2(FIXED, ADC, adc, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
355 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_adc);
356}
357
358
359/**
360 * @opcode 0x15
361 * @opgroup op_gen_arith_bin
362 * @opfltest cf
363 * @opflmodify of,sf,zf,af,pf,cf
364 */
365FNIEMOP_DEF(iemOp_adc_eAX_Iz)
366{
367 IEMOP_MNEMONIC2(FIXED, ADC, adc, rAX, Iz, DISOPTYPE_HARMLESS, 0);
368 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_adc);
369}
370
371
372/**
373 * @opcode 0x16
374 */
375FNIEMOP_DEF(iemOp_push_SS)
376{
377 IEMOP_MNEMONIC1(FIXED, PUSH, push, SS, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
378 IEMOP_HLP_NO_64BIT();
379 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_SS);
380}
381
382
383/**
384 * @opcode 0x17
385 * @opgroup op_gen_arith_bin
386 * @opfltest cf
387 * @opflmodify of,sf,zf,af,pf,cf
388 */
389FNIEMOP_DEF(iemOp_pop_SS)
390{
391 IEMOP_MNEMONIC1(FIXED, POP, pop, SS, DISOPTYPE_HARMLESS | DISOPTYPE_INHIBIT_IRQS | DISOPTYPE_INVALID_64 | DISOPTYPE_RRM_DANGEROUS , 0);
392 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
393 IEMOP_HLP_NO_64BIT();
394 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_SS, pVCpu->iem.s.enmEffOpSize);
395}
396
397
398/**
399 * @opcode 0x18
400 * @opgroup op_gen_arith_bin
401 * @opfltest cf
402 * @opflmodify of,sf,zf,af,pf,cf
403 */
404FNIEMOP_DEF(iemOp_sbb_Eb_Gb)
405{
406 IEMOP_MNEMONIC2(MR, SBB, sbb, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
407 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sbb);
408}
409
410
411/**
412 * @opcode 0x19
413 * @opgroup op_gen_arith_bin
414 * @opfltest cf
415 * @opflmodify of,sf,zf,af,pf,cf
416 */
417FNIEMOP_DEF(iemOp_sbb_Ev_Gv)
418{
419 IEMOP_MNEMONIC2(MR, SBB, sbb, Ev, Gv, DISOPTYPE_HARMLESS, 0);
420 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sbb);
421}
422
423
424/**
425 * @opcode 0x1a
426 * @opgroup op_gen_arith_bin
427 * @opfltest cf
428 * @opflmodify of,sf,zf,af,pf,cf
429 */
430FNIEMOP_DEF(iemOp_sbb_Gb_Eb)
431{
432 IEMOP_MNEMONIC2(RM, SBB, sbb, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
433 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sbb);
434}
435
436
437/**
438 * @opcode 0x1b
439 * @opgroup op_gen_arith_bin
440 * @opfltest cf
441 * @opflmodify of,sf,zf,af,pf,cf
442 */
443FNIEMOP_DEF(iemOp_sbb_Gv_Ev)
444{
445 IEMOP_MNEMONIC2(RM, SBB, sbb, Gv, Ev, DISOPTYPE_HARMLESS, 0);
446 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sbb);
447}
448
449
450/**
451 * @opcode 0x1c
452 * @opgroup op_gen_arith_bin
453 * @opfltest cf
454 * @opflmodify of,sf,zf,af,pf,cf
455 */
456FNIEMOP_DEF(iemOp_sbb_Al_Ib)
457{
458 IEMOP_MNEMONIC2(FIXED, SBB, sbb, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
459 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sbb);
460}
461
462
463/**
464 * @opcode 0x1d
465 * @opgroup op_gen_arith_bin
466 * @opfltest cf
467 * @opflmodify of,sf,zf,af,pf,cf
468 */
469FNIEMOP_DEF(iemOp_sbb_eAX_Iz)
470{
471 IEMOP_MNEMONIC2(FIXED, SBB, sbb, rAX, Iz, DISOPTYPE_HARMLESS, 0);
472 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sbb);
473}
474
475
476/**
477 * @opcode 0x1e
478 * @opgroup op_stack_sreg
479 */
480FNIEMOP_DEF(iemOp_push_DS)
481{
482 IEMOP_MNEMONIC1(FIXED, PUSH, push, DS, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0);
483 IEMOP_HLP_NO_64BIT();
484 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_DS);
485}
486
487
488/**
489 * @opcode 0x1f
490 * @opgroup op_stack_sreg
491 */
492FNIEMOP_DEF(iemOp_pop_DS)
493{
494 IEMOP_MNEMONIC1(FIXED, POP, pop, DS, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
495 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
496 IEMOP_HLP_NO_64BIT();
497 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_DS, pVCpu->iem.s.enmEffOpSize);
498}
499
500
501/**
502 * @opcode 0x20
503 * @opgroup op_gen_arith_bin
504 * @opflmodify of,sf,zf,af,pf,cf
505 * @opflundef af
506 * @opflclear of,cf
507 */
508FNIEMOP_DEF(iemOp_and_Eb_Gb)
509{
510 IEMOP_MNEMONIC2(MR, AND, and, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
511 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
512 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_and);
513}
514
515
516/**
517 * @opcode 0x21
518 * @opgroup op_gen_arith_bin
519 * @opflmodify of,sf,zf,af,pf,cf
520 * @opflundef af
521 * @opflclear of,cf
522 */
523FNIEMOP_DEF(iemOp_and_Ev_Gv)
524{
525 IEMOP_MNEMONIC2(MR, AND, and, Ev, Gv, DISOPTYPE_HARMLESS, 0);
526 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
527 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_and);
528}
529
530
531/**
532 * @opcode 0x22
533 * @opgroup op_gen_arith_bin
534 * @opflmodify of,sf,zf,af,pf,cf
535 * @opflundef af
536 * @opflclear of,cf
537 */
538FNIEMOP_DEF(iemOp_and_Gb_Eb)
539{
540 IEMOP_MNEMONIC2(RM, AND, and, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
541 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
542 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_and);
543}
544
545
546/**
547 * @opcode 0x23
548 * @opgroup op_gen_arith_bin
549 * @opflmodify of,sf,zf,af,pf,cf
550 * @opflundef af
551 * @opflclear of,cf
552 */
553FNIEMOP_DEF(iemOp_and_Gv_Ev)
554{
555 IEMOP_MNEMONIC2(RM, AND, and, Gv, Ev, DISOPTYPE_HARMLESS, 0);
556 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
557 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_and);
558}
559
560
561/**
562 * @opcode 0x24
563 * @opgroup op_gen_arith_bin
564 * @opflmodify of,sf,zf,af,pf,cf
565 * @opflundef af
566 * @opflclear of,cf
567 */
568FNIEMOP_DEF(iemOp_and_Al_Ib)
569{
570 IEMOP_MNEMONIC2(FIXED, AND, and, AL, Ib, DISOPTYPE_HARMLESS, 0);
571 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
572 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_and);
573}
574
575
576/**
577 * @opcode 0x25
578 * @opgroup op_gen_arith_bin
579 * @opflmodify of,sf,zf,af,pf,cf
580 * @opflundef af
581 * @opflclear of,cf
582 */
583FNIEMOP_DEF(iemOp_and_eAX_Iz)
584{
585 IEMOP_MNEMONIC2(FIXED, AND, and, rAX, Iz, DISOPTYPE_HARMLESS, 0);
586 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
587 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_and);
588}
589
590
591/**
592 * @opcode 0x26
593 * @opmnemonic SEG
594 * @op1 ES
595 * @opgroup op_prefix
596 * @openc prefix
597 * @opdisenum OP_SEG
598 * @ophints harmless
599 */
600FNIEMOP_DEF(iemOp_seg_ES)
601{
602 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg es");
603 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_ES;
604 pVCpu->iem.s.iEffSeg = X86_SREG_ES;
605
606 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
607 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
608}
609
610
611/**
612 * @opcode 0x27
613 * @opfltest af,cf
614 * @opflmodify of,sf,zf,af,pf,cf
615 * @opflundef of
616 */
617FNIEMOP_DEF(iemOp_daa)
618{
619 IEMOP_MNEMONIC0(FIXED, DAA, daa, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0); /* express implicit AL register use */
620 IEMOP_HLP_NO_64BIT();
621 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
622 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
623 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_daa);
624}
625
626
627/**
628 * @opcode 0x28
629 * @opgroup op_gen_arith_bin
630 * @opflmodify of,sf,zf,af,pf,cf
631 */
632FNIEMOP_DEF(iemOp_sub_Eb_Gb)
633{
634 IEMOP_MNEMONIC2(MR, SUB, sub, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
635 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sub);
636}
637
638
639/**
640 * @opcode 0x29
641 * @opgroup op_gen_arith_bin
642 * @opflmodify of,sf,zf,af,pf,cf
643 */
644FNIEMOP_DEF(iemOp_sub_Ev_Gv)
645{
646 IEMOP_MNEMONIC2(MR, SUB, sub, Ev, Gv, DISOPTYPE_HARMLESS, 0);
647 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sub);
648}
649
650
651/**
652 * @opcode 0x2a
653 * @opgroup op_gen_arith_bin
654 * @opflmodify of,sf,zf,af,pf,cf
655 */
656FNIEMOP_DEF(iemOp_sub_Gb_Eb)
657{
658 IEMOP_MNEMONIC2(RM, SUB, sub, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
659 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sub);
660}
661
662
663/**
664 * @opcode 0x2b
665 * @opgroup op_gen_arith_bin
666 * @opflmodify of,sf,zf,af,pf,cf
667 */
668FNIEMOP_DEF(iemOp_sub_Gv_Ev)
669{
670 IEMOP_MNEMONIC2(RM, SUB, sub, Gv, Ev, DISOPTYPE_HARMLESS, 0);
671 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sub);
672}
673
674
675/**
676 * @opcode 0x2c
677 * @opgroup op_gen_arith_bin
678 * @opflmodify of,sf,zf,af,pf,cf
679 */
680FNIEMOP_DEF(iemOp_sub_Al_Ib)
681{
682 IEMOP_MNEMONIC2(FIXED, SUB, sub, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
683 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sub);
684}
685
686
687/**
688 * @opcode 0x2d
689 * @opgroup op_gen_arith_bin
690 * @opflmodify of,sf,zf,af,pf,cf
691 */
692FNIEMOP_DEF(iemOp_sub_eAX_Iz)
693{
694 IEMOP_MNEMONIC2(FIXED, SUB, sub, rAX, Iz, DISOPTYPE_HARMLESS, 0);
695 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sub);
696}
697
698
699/**
700 * @opcode 0x2e
701 * @opmnemonic SEG
702 * @op1 CS
703 * @opgroup op_prefix
704 * @openc prefix
705 * @opdisenum OP_SEG
706 * @ophints harmless
707 */
708FNIEMOP_DEF(iemOp_seg_CS)
709{
710 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg cs");
711 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_CS;
712 pVCpu->iem.s.iEffSeg = X86_SREG_CS;
713
714 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
715 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
716}
717
718
719/**
720 * @opcode 0x2f
721 * @opfltest af,cf
722 * @opflmodify of,sf,zf,af,pf,cf
723 * @opflundef of
724 */
725FNIEMOP_DEF(iemOp_das)
726{
727 IEMOP_MNEMONIC0(FIXED, DAS, das, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0); /* express implicit AL register use */
728 IEMOP_HLP_NO_64BIT();
729 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
730 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
731 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_das);
732}
733
734
735/**
736 * @opcode 0x30
737 */
738FNIEMOP_DEF(iemOp_xor_Eb_Gb)
739{
740 IEMOP_MNEMONIC(xor_Eb_Gb, "xor Eb,Gb");
741 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
742 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_xor);
743}
744
745
746/**
747 * @opcode 0x31
748 */
749FNIEMOP_DEF(iemOp_xor_Ev_Gv)
750{
751 IEMOP_MNEMONIC(xor_Ev_Gv, "xor Ev,Gv");
752 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
753 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_xor);
754}
755
756
757/**
758 * @opcode 0x32
759 */
760FNIEMOP_DEF(iemOp_xor_Gb_Eb)
761{
762 IEMOP_MNEMONIC(xor_Gb_Eb, "xor Gb,Eb");
763 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
764 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_xor);
765}
766
767
768/**
769 * @opcode 0x33
770 */
771FNIEMOP_DEF(iemOp_xor_Gv_Ev)
772{
773 IEMOP_MNEMONIC(xor_Gv_Ev, "xor Gv,Ev");
774 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
775 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_xor);
776}
777
778
779/**
780 * @opcode 0x34
781 */
782FNIEMOP_DEF(iemOp_xor_Al_Ib)
783{
784 IEMOP_MNEMONIC(xor_al_Ib, "xor al,Ib");
785 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
786 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_xor);
787}
788
789
790/**
791 * @opcode 0x35
792 */
793FNIEMOP_DEF(iemOp_xor_eAX_Iz)
794{
795 IEMOP_MNEMONIC(xor_rAX_Iz, "xor rAX,Iz");
796 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
797 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_xor);
798}
799
800
801/**
802 * @opcode 0x36
803 */
804FNIEMOP_DEF(iemOp_seg_SS)
805{
806 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ss");
807 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_SS;
808 pVCpu->iem.s.iEffSeg = X86_SREG_SS;
809
810 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
811 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
812}
813
814
815/**
816 * @opcode 0x37
817 */
818FNIEMOP_STUB(iemOp_aaa);
819
820
821/**
822 * @opcode 0x38
823 */
824FNIEMOP_DEF(iemOp_cmp_Eb_Gb)
825{
826 IEMOP_MNEMONIC(cmp_Eb_Gb, "cmp Eb,Gb");
827 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_cmp);
828}
829
830
831/**
832 * @opcode 0x39
833 */
834FNIEMOP_DEF(iemOp_cmp_Ev_Gv)
835{
836 IEMOP_MNEMONIC(cmp_Ev_Gv, "cmp Ev,Gv");
837 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_cmp);
838}
839
840
841/**
842 * @opcode 0x3a
843 */
844FNIEMOP_DEF(iemOp_cmp_Gb_Eb)
845{
846 IEMOP_MNEMONIC(cmp_Gb_Eb, "cmp Gb,Eb");
847 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_cmp);
848}
849
850
851/**
852 * @opcode 0x3b
853 */
854FNIEMOP_DEF(iemOp_cmp_Gv_Ev)
855{
856 IEMOP_MNEMONIC(cmp_Gv_Ev, "cmp Gv,Ev");
857 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_cmp);
858}
859
860
861/**
862 * @opcode 0x3c
863 */
864FNIEMOP_DEF(iemOp_cmp_Al_Ib)
865{
866 IEMOP_MNEMONIC(cmp_al_Ib, "cmp al,Ib");
867 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_cmp);
868}
869
870
871/**
872 * @opcode 0x3d
873 */
874FNIEMOP_DEF(iemOp_cmp_eAX_Iz)
875{
876 IEMOP_MNEMONIC(cmp_rAX_Iz, "cmp rAX,Iz");
877 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_cmp);
878}
879
880
881/**
882 * @opcode 0x3e
883 */
884FNIEMOP_DEF(iemOp_seg_DS)
885{
886 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ds");
887 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_DS;
888 pVCpu->iem.s.iEffSeg = X86_SREG_DS;
889
890 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
891 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
892}
893
894
895/**
896 * @opcode 0x3f
897 */
898FNIEMOP_STUB(iemOp_aas);
899
900/**
901 * Common 'inc/dec/not/neg register' helper.
902 */
903FNIEMOP_DEF_2(iemOpCommonUnaryGReg, PCIEMOPUNARYSIZES, pImpl, uint8_t, iReg)
904{
905 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
906 switch (pVCpu->iem.s.enmEffOpSize)
907 {
908 case IEMMODE_16BIT:
909 IEM_MC_BEGIN(2, 0);
910 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
911 IEM_MC_ARG(uint32_t *, pEFlags, 1);
912 IEM_MC_REF_GREG_U16(pu16Dst, iReg);
913 IEM_MC_REF_EFLAGS(pEFlags);
914 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
915 IEM_MC_ADVANCE_RIP();
916 IEM_MC_END();
917 return VINF_SUCCESS;
918
919 case IEMMODE_32BIT:
920 IEM_MC_BEGIN(2, 0);
921 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
922 IEM_MC_ARG(uint32_t *, pEFlags, 1);
923 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
924 IEM_MC_REF_EFLAGS(pEFlags);
925 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
926 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
927 IEM_MC_ADVANCE_RIP();
928 IEM_MC_END();
929 return VINF_SUCCESS;
930
931 case IEMMODE_64BIT:
932 IEM_MC_BEGIN(2, 0);
933 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
934 IEM_MC_ARG(uint32_t *, pEFlags, 1);
935 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
936 IEM_MC_REF_EFLAGS(pEFlags);
937 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
938 IEM_MC_ADVANCE_RIP();
939 IEM_MC_END();
940 return VINF_SUCCESS;
941 }
942 return VINF_SUCCESS;
943}
944
945
946/**
947 * @opcode 0x40
948 */
949FNIEMOP_DEF(iemOp_inc_eAX)
950{
951 /*
952 * This is a REX prefix in 64-bit mode.
953 */
954 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
955 {
956 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex");
957 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX;
958
959 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
960 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
961 }
962
963 IEMOP_MNEMONIC(inc_eAX, "inc eAX");
964 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xAX);
965}
966
967
968/**
969 * @opcode 0x41
970 */
971FNIEMOP_DEF(iemOp_inc_eCX)
972{
973 /*
974 * This is a REX prefix in 64-bit mode.
975 */
976 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
977 {
978 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.b");
979 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B;
980 pVCpu->iem.s.uRexB = 1 << 3;
981
982 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
983 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
984 }
985
986 IEMOP_MNEMONIC(inc_eCX, "inc eCX");
987 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xCX);
988}
989
990
991/**
992 * @opcode 0x42
993 */
994FNIEMOP_DEF(iemOp_inc_eDX)
995{
996 /*
997 * This is a REX prefix in 64-bit mode.
998 */
999 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1000 {
1001 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.x");
1002 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X;
1003 pVCpu->iem.s.uRexIndex = 1 << 3;
1004
1005 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1006 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1007 }
1008
1009 IEMOP_MNEMONIC(inc_eDX, "inc eDX");
1010 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDX);
1011}
1012
1013
1014
1015/**
1016 * @opcode 0x43
1017 */
1018FNIEMOP_DEF(iemOp_inc_eBX)
1019{
1020 /*
1021 * This is a REX prefix in 64-bit mode.
1022 */
1023 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1024 {
1025 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bx");
1026 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
1027 pVCpu->iem.s.uRexB = 1 << 3;
1028 pVCpu->iem.s.uRexIndex = 1 << 3;
1029
1030 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1031 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1032 }
1033
1034 IEMOP_MNEMONIC(inc_eBX, "inc eBX");
1035 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBX);
1036}
1037
1038
1039/**
1040 * @opcode 0x44
1041 */
1042FNIEMOP_DEF(iemOp_inc_eSP)
1043{
1044 /*
1045 * This is a REX prefix in 64-bit mode.
1046 */
1047 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1048 {
1049 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.r");
1050 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R;
1051 pVCpu->iem.s.uRexReg = 1 << 3;
1052
1053 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1054 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1055 }
1056
1057 IEMOP_MNEMONIC(inc_eSP, "inc eSP");
1058 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSP);
1059}
1060
1061
1062/**
1063 * @opcode 0x45
1064 */
1065FNIEMOP_DEF(iemOp_inc_eBP)
1066{
1067 /*
1068 * This is a REX prefix in 64-bit mode.
1069 */
1070 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1071 {
1072 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rb");
1073 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B;
1074 pVCpu->iem.s.uRexReg = 1 << 3;
1075 pVCpu->iem.s.uRexB = 1 << 3;
1076
1077 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1078 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1079 }
1080
1081 IEMOP_MNEMONIC(inc_eBP, "inc eBP");
1082 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBP);
1083}
1084
1085
1086/**
1087 * @opcode 0x46
1088 */
1089FNIEMOP_DEF(iemOp_inc_eSI)
1090{
1091 /*
1092 * This is a REX prefix in 64-bit mode.
1093 */
1094 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1095 {
1096 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rx");
1097 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X;
1098 pVCpu->iem.s.uRexReg = 1 << 3;
1099 pVCpu->iem.s.uRexIndex = 1 << 3;
1100
1101 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1102 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1103 }
1104
1105 IEMOP_MNEMONIC(inc_eSI, "inc eSI");
1106 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSI);
1107}
1108
1109
1110/**
1111 * @opcode 0x47
1112 */
1113FNIEMOP_DEF(iemOp_inc_eDI)
1114{
1115 /*
1116 * This is a REX prefix in 64-bit mode.
1117 */
1118 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1119 {
1120 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbx");
1121 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
1122 pVCpu->iem.s.uRexReg = 1 << 3;
1123 pVCpu->iem.s.uRexB = 1 << 3;
1124 pVCpu->iem.s.uRexIndex = 1 << 3;
1125
1126 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1127 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1128 }
1129
1130 IEMOP_MNEMONIC(inc_eDI, "inc eDI");
1131 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDI);
1132}
1133
1134
1135/**
1136 * @opcode 0x48
1137 */
1138FNIEMOP_DEF(iemOp_dec_eAX)
1139{
1140 /*
1141 * This is a REX prefix in 64-bit mode.
1142 */
1143 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1144 {
1145 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.w");
1146 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_SIZE_REX_W;
1147 iemRecalEffOpSize(pVCpu);
1148
1149 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1150 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1151 }
1152
1153 IEMOP_MNEMONIC(dec_eAX, "dec eAX");
1154 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xAX);
1155}
1156
1157
1158/**
1159 * @opcode 0x49
1160 */
1161FNIEMOP_DEF(iemOp_dec_eCX)
1162{
1163 /*
1164 * This is a REX prefix in 64-bit mode.
1165 */
1166 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1167 {
1168 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bw");
1169 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
1170 pVCpu->iem.s.uRexB = 1 << 3;
1171 iemRecalEffOpSize(pVCpu);
1172
1173 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1174 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1175 }
1176
1177 IEMOP_MNEMONIC(dec_eCX, "dec eCX");
1178 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xCX);
1179}
1180
1181
1182/**
1183 * @opcode 0x4a
1184 */
1185FNIEMOP_DEF(iemOp_dec_eDX)
1186{
1187 /*
1188 * This is a REX prefix in 64-bit mode.
1189 */
1190 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1191 {
1192 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.xw");
1193 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
1194 pVCpu->iem.s.uRexIndex = 1 << 3;
1195 iemRecalEffOpSize(pVCpu);
1196
1197 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1198 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1199 }
1200
1201 IEMOP_MNEMONIC(dec_eDX, "dec eDX");
1202 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDX);
1203}
1204
1205
1206/**
1207 * @opcode 0x4b
1208 */
1209FNIEMOP_DEF(iemOp_dec_eBX)
1210{
1211 /*
1212 * This is a REX prefix in 64-bit mode.
1213 */
1214 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1215 {
1216 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bxw");
1217 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
1218 pVCpu->iem.s.uRexB = 1 << 3;
1219 pVCpu->iem.s.uRexIndex = 1 << 3;
1220 iemRecalEffOpSize(pVCpu);
1221
1222 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1223 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1224 }
1225
1226 IEMOP_MNEMONIC(dec_eBX, "dec eBX");
1227 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBX);
1228}
1229
1230
1231/**
1232 * @opcode 0x4c
1233 */
1234FNIEMOP_DEF(iemOp_dec_eSP)
1235{
1236 /*
1237 * This is a REX prefix in 64-bit mode.
1238 */
1239 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1240 {
1241 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rw");
1242 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_SIZE_REX_W;
1243 pVCpu->iem.s.uRexReg = 1 << 3;
1244 iemRecalEffOpSize(pVCpu);
1245
1246 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1247 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1248 }
1249
1250 IEMOP_MNEMONIC(dec_eSP, "dec eSP");
1251 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSP);
1252}
1253
1254
1255/**
1256 * @opcode 0x4d
1257 */
1258FNIEMOP_DEF(iemOp_dec_eBP)
1259{
1260 /*
1261 * This is a REX prefix in 64-bit mode.
1262 */
1263 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1264 {
1265 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbw");
1266 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
1267 pVCpu->iem.s.uRexReg = 1 << 3;
1268 pVCpu->iem.s.uRexB = 1 << 3;
1269 iemRecalEffOpSize(pVCpu);
1270
1271 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1272 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1273 }
1274
1275 IEMOP_MNEMONIC(dec_eBP, "dec eBP");
1276 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBP);
1277}
1278
1279
1280/**
1281 * @opcode 0x4e
1282 */
1283FNIEMOP_DEF(iemOp_dec_eSI)
1284{
1285 /*
1286 * This is a REX prefix in 64-bit mode.
1287 */
1288 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1289 {
1290 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rxw");
1291 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
1292 pVCpu->iem.s.uRexReg = 1 << 3;
1293 pVCpu->iem.s.uRexIndex = 1 << 3;
1294 iemRecalEffOpSize(pVCpu);
1295
1296 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1297 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1298 }
1299
1300 IEMOP_MNEMONIC(dec_eSI, "dec eSI");
1301 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSI);
1302}
1303
1304
1305/**
1306 * @opcode 0x4f
1307 */
1308FNIEMOP_DEF(iemOp_dec_eDI)
1309{
1310 /*
1311 * This is a REX prefix in 64-bit mode.
1312 */
1313 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1314 {
1315 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbxw");
1316 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
1317 pVCpu->iem.s.uRexReg = 1 << 3;
1318 pVCpu->iem.s.uRexB = 1 << 3;
1319 pVCpu->iem.s.uRexIndex = 1 << 3;
1320 iemRecalEffOpSize(pVCpu);
1321
1322 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1323 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1324 }
1325
1326 IEMOP_MNEMONIC(dec_eDI, "dec eDI");
1327 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDI);
1328}
1329
1330
1331/**
1332 * Common 'push register' helper.
1333 */
1334FNIEMOP_DEF_1(iemOpCommonPushGReg, uint8_t, iReg)
1335{
1336 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1337 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1338 {
1339 iReg |= pVCpu->iem.s.uRexB;
1340 pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
1341 pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
1342 }
1343
1344 switch (pVCpu->iem.s.enmEffOpSize)
1345 {
1346 case IEMMODE_16BIT:
1347 IEM_MC_BEGIN(0, 1);
1348 IEM_MC_LOCAL(uint16_t, u16Value);
1349 IEM_MC_FETCH_GREG_U16(u16Value, iReg);
1350 IEM_MC_PUSH_U16(u16Value);
1351 IEM_MC_ADVANCE_RIP();
1352 IEM_MC_END();
1353 break;
1354
1355 case IEMMODE_32BIT:
1356 IEM_MC_BEGIN(0, 1);
1357 IEM_MC_LOCAL(uint32_t, u32Value);
1358 IEM_MC_FETCH_GREG_U32(u32Value, iReg);
1359 IEM_MC_PUSH_U32(u32Value);
1360 IEM_MC_ADVANCE_RIP();
1361 IEM_MC_END();
1362 break;
1363
1364 case IEMMODE_64BIT:
1365 IEM_MC_BEGIN(0, 1);
1366 IEM_MC_LOCAL(uint64_t, u64Value);
1367 IEM_MC_FETCH_GREG_U64(u64Value, iReg);
1368 IEM_MC_PUSH_U64(u64Value);
1369 IEM_MC_ADVANCE_RIP();
1370 IEM_MC_END();
1371 break;
1372 }
1373
1374 return VINF_SUCCESS;
1375}
1376
1377
1378/**
1379 * @opcode 0x50
1380 */
1381FNIEMOP_DEF(iemOp_push_eAX)
1382{
1383 IEMOP_MNEMONIC(push_rAX, "push rAX");
1384 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xAX);
1385}
1386
1387
1388/**
1389 * @opcode 0x51
1390 */
1391FNIEMOP_DEF(iemOp_push_eCX)
1392{
1393 IEMOP_MNEMONIC(push_rCX, "push rCX");
1394 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xCX);
1395}
1396
1397
1398/**
1399 * @opcode 0x52
1400 */
1401FNIEMOP_DEF(iemOp_push_eDX)
1402{
1403 IEMOP_MNEMONIC(push_rDX, "push rDX");
1404 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDX);
1405}
1406
1407
1408/**
1409 * @opcode 0x53
1410 */
1411FNIEMOP_DEF(iemOp_push_eBX)
1412{
1413 IEMOP_MNEMONIC(push_rBX, "push rBX");
1414 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBX);
1415}
1416
1417
1418/**
1419 * @opcode 0x54
1420 */
1421FNIEMOP_DEF(iemOp_push_eSP)
1422{
1423 IEMOP_MNEMONIC(push_rSP, "push rSP");
1424 if (IEM_GET_TARGET_CPU(pVCpu) == IEMTARGETCPU_8086)
1425 {
1426 IEM_MC_BEGIN(0, 1);
1427 IEM_MC_LOCAL(uint16_t, u16Value);
1428 IEM_MC_FETCH_GREG_U16(u16Value, X86_GREG_xSP);
1429 IEM_MC_SUB_LOCAL_U16(u16Value, 2);
1430 IEM_MC_PUSH_U16(u16Value);
1431 IEM_MC_ADVANCE_RIP();
1432 IEM_MC_END();
1433 }
1434 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSP);
1435}
1436
1437
1438/**
1439 * @opcode 0x55
1440 */
1441FNIEMOP_DEF(iemOp_push_eBP)
1442{
1443 IEMOP_MNEMONIC(push_rBP, "push rBP");
1444 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBP);
1445}
1446
1447
1448/**
1449 * @opcode 0x56
1450 */
1451FNIEMOP_DEF(iemOp_push_eSI)
1452{
1453 IEMOP_MNEMONIC(push_rSI, "push rSI");
1454 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSI);
1455}
1456
1457
1458/**
1459 * @opcode 0x57
1460 */
1461FNIEMOP_DEF(iemOp_push_eDI)
1462{
1463 IEMOP_MNEMONIC(push_rDI, "push rDI");
1464 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDI);
1465}
1466
1467
1468/**
1469 * Common 'pop register' helper.
1470 */
1471FNIEMOP_DEF_1(iemOpCommonPopGReg, uint8_t, iReg)
1472{
1473 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1474 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1475 {
1476 iReg |= pVCpu->iem.s.uRexB;
1477 pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
1478 pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
1479 }
1480
1481 switch (pVCpu->iem.s.enmEffOpSize)
1482 {
1483 case IEMMODE_16BIT:
1484 IEM_MC_BEGIN(0, 1);
1485 IEM_MC_LOCAL(uint16_t *, pu16Dst);
1486 IEM_MC_REF_GREG_U16(pu16Dst, iReg);
1487 IEM_MC_POP_U16(pu16Dst);
1488 IEM_MC_ADVANCE_RIP();
1489 IEM_MC_END();
1490 break;
1491
1492 case IEMMODE_32BIT:
1493 IEM_MC_BEGIN(0, 1);
1494 IEM_MC_LOCAL(uint32_t *, pu32Dst);
1495 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
1496 IEM_MC_POP_U32(pu32Dst);
1497 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /** @todo testcase*/
1498 IEM_MC_ADVANCE_RIP();
1499 IEM_MC_END();
1500 break;
1501
1502 case IEMMODE_64BIT:
1503 IEM_MC_BEGIN(0, 1);
1504 IEM_MC_LOCAL(uint64_t *, pu64Dst);
1505 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
1506 IEM_MC_POP_U64(pu64Dst);
1507 IEM_MC_ADVANCE_RIP();
1508 IEM_MC_END();
1509 break;
1510 }
1511
1512 return VINF_SUCCESS;
1513}
1514
1515
1516/**
1517 * @opcode 0x58
1518 */
1519FNIEMOP_DEF(iemOp_pop_eAX)
1520{
1521 IEMOP_MNEMONIC(pop_rAX, "pop rAX");
1522 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xAX);
1523}
1524
1525
1526/**
1527 * @opcode 0x59
1528 */
1529FNIEMOP_DEF(iemOp_pop_eCX)
1530{
1531 IEMOP_MNEMONIC(pop_rCX, "pop rCX");
1532 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xCX);
1533}
1534
1535
1536/**
1537 * @opcode 0x5a
1538 */
1539FNIEMOP_DEF(iemOp_pop_eDX)
1540{
1541 IEMOP_MNEMONIC(pop_rDX, "pop rDX");
1542 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDX);
1543}
1544
1545
1546/**
1547 * @opcode 0x5b
1548 */
1549FNIEMOP_DEF(iemOp_pop_eBX)
1550{
1551 IEMOP_MNEMONIC(pop_rBX, "pop rBX");
1552 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBX);
1553}
1554
1555
1556/**
1557 * @opcode 0x5c
1558 */
1559FNIEMOP_DEF(iemOp_pop_eSP)
1560{
1561 IEMOP_MNEMONIC(pop_rSP, "pop rSP");
1562 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1563 {
1564 if (pVCpu->iem.s.uRexB)
1565 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSP);
1566 pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
1567 pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
1568 }
1569
1570 IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
1571 DISOPTYPE_HARMLESS | DISOPTYPE_DEFAULT_64_OP_SIZE | DISOPTYPE_REXB_EXTENDS_OPREG);
1572 /** @todo add testcase for this instruction. */
1573 switch (pVCpu->iem.s.enmEffOpSize)
1574 {
1575 case IEMMODE_16BIT:
1576 IEM_MC_BEGIN(0, 1);
1577 IEM_MC_LOCAL(uint16_t, u16Dst);
1578 IEM_MC_POP_U16(&u16Dst); /** @todo not correct MC, fix later. */
1579 IEM_MC_STORE_GREG_U16(X86_GREG_xSP, u16Dst);
1580 IEM_MC_ADVANCE_RIP();
1581 IEM_MC_END();
1582 break;
1583
1584 case IEMMODE_32BIT:
1585 IEM_MC_BEGIN(0, 1);
1586 IEM_MC_LOCAL(uint32_t, u32Dst);
1587 IEM_MC_POP_U32(&u32Dst);
1588 IEM_MC_STORE_GREG_U32(X86_GREG_xSP, u32Dst);
1589 IEM_MC_ADVANCE_RIP();
1590 IEM_MC_END();
1591 break;
1592
1593 case IEMMODE_64BIT:
1594 IEM_MC_BEGIN(0, 1);
1595 IEM_MC_LOCAL(uint64_t, u64Dst);
1596 IEM_MC_POP_U64(&u64Dst);
1597 IEM_MC_STORE_GREG_U64(X86_GREG_xSP, u64Dst);
1598 IEM_MC_ADVANCE_RIP();
1599 IEM_MC_END();
1600 break;
1601 }
1602
1603 return VINF_SUCCESS;
1604}
1605
1606
1607/**
1608 * @opcode 0x5d
1609 */
1610FNIEMOP_DEF(iemOp_pop_eBP)
1611{
1612 IEMOP_MNEMONIC(pop_rBP, "pop rBP");
1613 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBP);
1614}
1615
1616
1617/**
1618 * @opcode 0x5e
1619 */
1620FNIEMOP_DEF(iemOp_pop_eSI)
1621{
1622 IEMOP_MNEMONIC(pop_rSI, "pop rSI");
1623 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSI);
1624}
1625
1626
1627/**
1628 * @opcode 0x5f
1629 */
1630FNIEMOP_DEF(iemOp_pop_eDI)
1631{
1632 IEMOP_MNEMONIC(pop_rDI, "pop rDI");
1633 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDI);
1634}
1635
1636
1637/**
1638 * @opcode 0x60
1639 */
1640FNIEMOP_DEF(iemOp_pusha)
1641{
1642 IEMOP_MNEMONIC(pusha, "pusha");
1643 IEMOP_HLP_MIN_186();
1644 IEMOP_HLP_NO_64BIT();
1645 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
1646 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_16);
1647 Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
1648 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_32);
1649}
1650
1651
1652/**
1653 * @opcode 0x61
1654 */
1655FNIEMOP_DEF(iemOp_popa__mvex)
1656{
1657 if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
1658 {
1659 IEMOP_MNEMONIC(popa, "popa");
1660 IEMOP_HLP_MIN_186();
1661 IEMOP_HLP_NO_64BIT();
1662 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
1663 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_16);
1664 Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
1665 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_32);
1666 }
1667 IEMOP_MNEMONIC(mvex, "mvex");
1668 Log(("mvex prefix is not supported!\n"));
1669 return IEMOP_RAISE_INVALID_OPCODE();
1670}
1671
1672
1673/**
1674 * @opcode 0x62
1675 * @opmnemonic bound
1676 * @op1 Gv
1677 * @op2 Ma
1678 * @opmincpu 80186
1679 * @ophints harmless invalid_64
1680 */
1681FNIEMOP_STUB(iemOp_bound_Gv_Ma__evex);
1682// IEMOP_HLP_MIN_186();
1683
1684
/** Opcode 0x63 - non-64-bit modes.
 *
 * ARPL Ew,Gw - adjust the RPL field of the destination selector up to the
 * RPL of the source, setting ZF when an adjustment was made.  286+,
 * protected mode only (in 64-bit mode 0x63 is MOVSXD, handled elsewhere).
 */
FNIEMOP_DEF(iemOp_arpl_Ew_Gw)
{
    IEMOP_MNEMONIC(arpl_Ew_Gw, "arpl Ew,Gw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register */
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint16_t *, pu16Dst, 0);
        IEM_MC_ARG(uint16_t, u16Src, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* Memory: read-modify-write through a mapped pointer. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *, pu16Dst, 0);
        IEM_MC_ARG(uint16_t, u16Src, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;

}
1734
1735
1736/**
1737 * @opcode 0x63
1738 *
1739 * @note This is a weird one. It works like a regular move instruction if
1740 * REX.W isn't set, at least according to AMD docs (rev 3.15, 2009-11).
1741 * @todo This definitely needs a testcase to verify the odd cases. */
1742FNIEMOP_DEF(iemOp_movsxd_Gv_Ev)
1743{
1744 Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT); /* Caller branched already . */
1745
1746 IEMOP_MNEMONIC(movsxd_Gv_Ev, "movsxd Gv,Ev");
1747 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1748
1749 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1750 {
1751 /*
1752 * Register to register.
1753 */
1754 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1755 IEM_MC_BEGIN(0, 1);
1756 IEM_MC_LOCAL(uint64_t, u64Value);
1757 IEM_MC_FETCH_GREG_U32_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1758 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
1759 IEM_MC_ADVANCE_RIP();
1760 IEM_MC_END();
1761 }
1762 else
1763 {
1764 /*
1765 * We're loading a register from memory.
1766 */
1767 IEM_MC_BEGIN(0, 2);
1768 IEM_MC_LOCAL(uint64_t, u64Value);
1769 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
1770 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
1771 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1772 IEM_MC_FETCH_MEM_U32_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
1773 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
1774 IEM_MC_ADVANCE_RIP();
1775 IEM_MC_END();
1776 }
1777 return VINF_SUCCESS;
1778}
1779
1780
1781/**
1782 * @opcode 0x64
1783 * @opmnemonic segfs
1784 * @opmincpu 80386
1785 * @opgroup op_prefixes
1786 */
1787FNIEMOP_DEF(iemOp_seg_FS)
1788{
1789 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg fs");
1790 IEMOP_HLP_MIN_386();
1791
1792 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_FS;
1793 pVCpu->iem.s.iEffSeg = X86_SREG_FS;
1794
1795 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1796 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1797}
1798
1799
1800/**
1801 * @opcode 0x65
1802 * @opmnemonic seggs
1803 * @opmincpu 80386
1804 * @opgroup op_prefixes
1805 */
1806FNIEMOP_DEF(iemOp_seg_GS)
1807{
1808 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg gs");
1809 IEMOP_HLP_MIN_386();
1810
1811 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_GS;
1812 pVCpu->iem.s.iEffSeg = X86_SREG_GS;
1813
1814 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1815 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1816}
1817
1818
1819/**
1820 * @opcode 0x66
1821 * @opmnemonic opsize
1822 * @openc prefix
1823 * @opmincpu 80386
1824 * @ophints harmless
1825 * @opgroup op_prefixes
1826 */
1827FNIEMOP_DEF(iemOp_op_size)
1828{
1829 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("op size");
1830 IEMOP_HLP_MIN_386();
1831
1832 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_OP;
1833 iemRecalEffOpSize(pVCpu);
1834
1835 /* For the 4 entry opcode tables, the operand prefix doesn't not count
1836 when REPZ or REPNZ are present. */
1837 if (pVCpu->iem.s.idxPrefix == 0)
1838 pVCpu->iem.s.idxPrefix = 1;
1839
1840 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1841 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1842}
1843
1844
1845/**
1846 * @opcode 0x67
1847 * @opmnemonic addrsize
1848 * @openc prefix
1849 * @opmincpu 80386
1850 * @ophints harmless
1851 * @opgroup op_prefixes
1852 */
1853FNIEMOP_DEF(iemOp_addr_size)
1854{
1855 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("addr size");
1856 IEMOP_HLP_MIN_386();
1857
1858 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_ADDR;
1859 switch (pVCpu->iem.s.enmDefAddrMode)
1860 {
1861 case IEMMODE_16BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
1862 case IEMMODE_32BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_16BIT; break;
1863 case IEMMODE_64BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
1864 default: AssertFailed();
1865 }
1866
1867 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1868 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1869}
1870
1871
1872/**
1873 * @opcode 0x68
1874 */
1875FNIEMOP_DEF(iemOp_push_Iz)
1876{
1877 IEMOP_MNEMONIC(push_Iz, "push Iz");
1878 IEMOP_HLP_MIN_186();
1879 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
1880 switch (pVCpu->iem.s.enmEffOpSize)
1881 {
1882 case IEMMODE_16BIT:
1883 {
1884 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
1885 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1886 IEM_MC_BEGIN(0,0);
1887 IEM_MC_PUSH_U16(u16Imm);
1888 IEM_MC_ADVANCE_RIP();
1889 IEM_MC_END();
1890 return VINF_SUCCESS;
1891 }
1892
1893 case IEMMODE_32BIT:
1894 {
1895 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
1896 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1897 IEM_MC_BEGIN(0,0);
1898 IEM_MC_PUSH_U32(u32Imm);
1899 IEM_MC_ADVANCE_RIP();
1900 IEM_MC_END();
1901 return VINF_SUCCESS;
1902 }
1903
1904 case IEMMODE_64BIT:
1905 {
1906 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
1907 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1908 IEM_MC_BEGIN(0,0);
1909 IEM_MC_PUSH_U64(u64Imm);
1910 IEM_MC_ADVANCE_RIP();
1911 IEM_MC_END();
1912 return VINF_SUCCESS;
1913 }
1914
1915 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1916 }
1917}
1918
1919
1920/**
1921 * @opcode 0x69
1922 */
1923FNIEMOP_DEF(iemOp_imul_Gv_Ev_Iz)
1924{
1925 IEMOP_MNEMONIC(imul_Gv_Ev_Iz, "imul Gv,Ev,Iz"); /* Gv = Ev * Iz; */
1926 IEMOP_HLP_MIN_186();
1927 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1928 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
1929
1930 switch (pVCpu->iem.s.enmEffOpSize)
1931 {
1932 case IEMMODE_16BIT:
1933 {
1934 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1935 {
1936 /* register operand */
1937 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
1938 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1939
1940 IEM_MC_BEGIN(3, 1);
1941 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
1942 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm,1);
1943 IEM_MC_ARG(uint32_t *, pEFlags, 2);
1944 IEM_MC_LOCAL(uint16_t, u16Tmp);
1945
1946 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1947 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
1948 IEM_MC_REF_EFLAGS(pEFlags);
1949 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
1950 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);
1951
1952 IEM_MC_ADVANCE_RIP();
1953 IEM_MC_END();
1954 }
1955 else
1956 {
1957 /* memory operand */
1958 IEM_MC_BEGIN(3, 2);
1959 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
1960 IEM_MC_ARG(uint16_t, u16Src, 1);
1961 IEM_MC_ARG(uint32_t *, pEFlags, 2);
1962 IEM_MC_LOCAL(uint16_t, u16Tmp);
1963 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
1964
1965 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
1966 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
1967 IEM_MC_ASSIGN(u16Src, u16Imm);
1968 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1969 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
1970 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
1971 IEM_MC_REF_EFLAGS(pEFlags);
1972 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
1973 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);
1974
1975 IEM_MC_ADVANCE_RIP();
1976 IEM_MC_END();
1977 }
1978 return VINF_SUCCESS;
1979 }
1980
1981 case IEMMODE_32BIT:
1982 {
1983 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1984 {
1985 /* register operand */
1986 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
1987 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1988
1989 IEM_MC_BEGIN(3, 1);
1990 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
1991 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm,1);
1992 IEM_MC_ARG(uint32_t *, pEFlags, 2);
1993 IEM_MC_LOCAL(uint32_t, u32Tmp);
1994
1995 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1996 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
1997 IEM_MC_REF_EFLAGS(pEFlags);
1998 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
1999 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
2000
2001 IEM_MC_ADVANCE_RIP();
2002 IEM_MC_END();
2003 }
2004 else
2005 {
2006 /* memory operand */
2007 IEM_MC_BEGIN(3, 2);
2008 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
2009 IEM_MC_ARG(uint32_t, u32Src, 1);
2010 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2011 IEM_MC_LOCAL(uint32_t, u32Tmp);
2012 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2013
2014 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
2015 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
2016 IEM_MC_ASSIGN(u32Src, u32Imm);
2017 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2018 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
2019 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
2020 IEM_MC_REF_EFLAGS(pEFlags);
2021 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
2022 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
2023
2024 IEM_MC_ADVANCE_RIP();
2025 IEM_MC_END();
2026 }
2027 return VINF_SUCCESS;
2028 }
2029
2030 case IEMMODE_64BIT:
2031 {
2032 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2033 {
2034 /* register operand */
2035 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
2036 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2037
2038 IEM_MC_BEGIN(3, 1);
2039 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
2040 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm,1);
2041 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2042 IEM_MC_LOCAL(uint64_t, u64Tmp);
2043
2044 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2045 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
2046 IEM_MC_REF_EFLAGS(pEFlags);
2047 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
2048 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
2049
2050 IEM_MC_ADVANCE_RIP();
2051 IEM_MC_END();
2052 }
2053 else
2054 {
2055 /* memory operand */
2056 IEM_MC_BEGIN(3, 2);
2057 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
2058 IEM_MC_ARG(uint64_t, u64Src, 1);
2059 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2060 IEM_MC_LOCAL(uint64_t, u64Tmp);
2061 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2062
2063 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
2064 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
2065 IEM_MC_ASSIGN(u64Src, u64Imm);
2066 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2067 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
2068 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
2069 IEM_MC_REF_EFLAGS(pEFlags);
2070 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
2071 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
2072
2073 IEM_MC_ADVANCE_RIP();
2074 IEM_MC_END();
2075 }
2076 return VINF_SUCCESS;
2077 }
2078 }
2079 AssertFailedReturn(VERR_IEM_IPE_9);
2080}
2081
2082
2083/**
2084 * @opcode 0x6a
2085 */
2086FNIEMOP_DEF(iemOp_push_Ib)
2087{
2088 IEMOP_MNEMONIC(push_Ib, "push Ib");
2089 IEMOP_HLP_MIN_186();
2090 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2091 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2092 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2093
2094 IEM_MC_BEGIN(0,0);
2095 switch (pVCpu->iem.s.enmEffOpSize)
2096 {
2097 case IEMMODE_16BIT:
2098 IEM_MC_PUSH_U16(i8Imm);
2099 break;
2100 case IEMMODE_32BIT:
2101 IEM_MC_PUSH_U32(i8Imm);
2102 break;
2103 case IEMMODE_64BIT:
2104 IEM_MC_PUSH_U64(i8Imm);
2105 break;
2106 }
2107 IEM_MC_ADVANCE_RIP();
2108 IEM_MC_END();
2109 return VINF_SUCCESS;
2110}
2111
2112
2113/**
2114 * @opcode 0x6b
2115 */
2116FNIEMOP_DEF(iemOp_imul_Gv_Ev_Ib)
2117{
2118 IEMOP_MNEMONIC(imul_Gv_Ev_Ib, "imul Gv,Ev,Ib"); /* Gv = Ev * Iz; */
2119 IEMOP_HLP_MIN_186();
2120 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2121 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
2122
2123 switch (pVCpu->iem.s.enmEffOpSize)
2124 {
2125 case IEMMODE_16BIT:
2126 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2127 {
2128 /* register operand */
2129 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
2130 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2131
2132 IEM_MC_BEGIN(3, 1);
2133 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
2134 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ (int8_t)u8Imm, 1);
2135 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2136 IEM_MC_LOCAL(uint16_t, u16Tmp);
2137
2138 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2139 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
2140 IEM_MC_REF_EFLAGS(pEFlags);
2141 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
2142 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);
2143
2144 IEM_MC_ADVANCE_RIP();
2145 IEM_MC_END();
2146 }
2147 else
2148 {
2149 /* memory operand */
2150 IEM_MC_BEGIN(3, 2);
2151 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
2152 IEM_MC_ARG(uint16_t, u16Src, 1);
2153 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2154 IEM_MC_LOCAL(uint16_t, u16Tmp);
2155 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2156
2157 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
2158 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_S8_SX_U16(&u16Imm);
2159 IEM_MC_ASSIGN(u16Src, u16Imm);
2160 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2161 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
2162 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
2163 IEM_MC_REF_EFLAGS(pEFlags);
2164 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
2165 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);
2166
2167 IEM_MC_ADVANCE_RIP();
2168 IEM_MC_END();
2169 }
2170 return VINF_SUCCESS;
2171
2172 case IEMMODE_32BIT:
2173 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2174 {
2175 /* register operand */
2176 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
2177 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2178
2179 IEM_MC_BEGIN(3, 1);
2180 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
2181 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ (int8_t)u8Imm, 1);
2182 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2183 IEM_MC_LOCAL(uint32_t, u32Tmp);
2184
2185 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2186 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
2187 IEM_MC_REF_EFLAGS(pEFlags);
2188 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
2189 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
2190
2191 IEM_MC_ADVANCE_RIP();
2192 IEM_MC_END();
2193 }
2194 else
2195 {
2196 /* memory operand */
2197 IEM_MC_BEGIN(3, 2);
2198 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
2199 IEM_MC_ARG(uint32_t, u32Src, 1);
2200 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2201 IEM_MC_LOCAL(uint32_t, u32Tmp);
2202 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2203
2204 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
2205 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_S8_SX_U32(&u32Imm);
2206 IEM_MC_ASSIGN(u32Src, u32Imm);
2207 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2208 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
2209 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
2210 IEM_MC_REF_EFLAGS(pEFlags);
2211 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
2212 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
2213
2214 IEM_MC_ADVANCE_RIP();
2215 IEM_MC_END();
2216 }
2217 return VINF_SUCCESS;
2218
2219 case IEMMODE_64BIT:
2220 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2221 {
2222 /* register operand */
2223 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
2224 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2225
2226 IEM_MC_BEGIN(3, 1);
2227 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
2228 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ (int8_t)u8Imm, 1);
2229 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2230 IEM_MC_LOCAL(uint64_t, u64Tmp);
2231
2232 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2233 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
2234 IEM_MC_REF_EFLAGS(pEFlags);
2235 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
2236 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
2237
2238 IEM_MC_ADVANCE_RIP();
2239 IEM_MC_END();
2240 }
2241 else
2242 {
2243 /* memory operand */
2244 IEM_MC_BEGIN(3, 2);
2245 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
2246 IEM_MC_ARG(uint64_t, u64Src, 1);
2247 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2248 IEM_MC_LOCAL(uint64_t, u64Tmp);
2249 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2250
2251 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
2252 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S8_SX_U64(&u64Imm);
2253 IEM_MC_ASSIGN(u64Src, u64Imm);
2254 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2255 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
2256 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
2257 IEM_MC_REF_EFLAGS(pEFlags);
2258 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
2259 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
2260
2261 IEM_MC_ADVANCE_RIP();
2262 IEM_MC_END();
2263 }
2264 return VINF_SUCCESS;
2265 }
2266 AssertFailedReturn(VERR_IEM_IPE_8);
2267}
2268
2269
2270/**
2271 * @opcode 0x6c
2272 */
2273FNIEMOP_DEF(iemOp_insb_Yb_DX)
2274{
2275 IEMOP_HLP_MIN_186();
2276 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2277 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
2278 {
2279 IEMOP_MNEMONIC(rep_insb_Yb_DX, "rep ins Yb,DX");
2280 switch (pVCpu->iem.s.enmEffAddrMode)
2281 {
2282 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr16, false);
2283 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr32, false);
2284 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr64, false);
2285 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2286 }
2287 }
2288 else
2289 {
2290 IEMOP_MNEMONIC(ins_Yb_DX, "ins Yb,DX");
2291 switch (pVCpu->iem.s.enmEffAddrMode)
2292 {
2293 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr16, false);
2294 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr32, false);
2295 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr64, false);
2296 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2297 }
2298 }
2299}
2300
2301
2302/**
2303 * @opcode 0x6d
2304 */
2305FNIEMOP_DEF(iemOp_inswd_Yv_DX)
2306{
2307 IEMOP_HLP_MIN_186();
2308 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2309 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2310 {
2311 IEMOP_MNEMONIC(rep_ins_Yv_DX, "rep ins Yv,DX");
2312 switch (pVCpu->iem.s.enmEffOpSize)
2313 {
2314 case IEMMODE_16BIT:
2315 switch (pVCpu->iem.s.enmEffAddrMode)
2316 {
2317 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr16, false);
2318 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr32, false);
2319 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr64, false);
2320 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2321 }
2322 break;
2323 case IEMMODE_64BIT:
2324 case IEMMODE_32BIT:
2325 switch (pVCpu->iem.s.enmEffAddrMode)
2326 {
2327 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr16, false);
2328 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr32, false);
2329 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr64, false);
2330 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2331 }
2332 break;
2333 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2334 }
2335 }
2336 else
2337 {
2338 IEMOP_MNEMONIC(ins_Yv_DX, "ins Yv,DX");
2339 switch (pVCpu->iem.s.enmEffOpSize)
2340 {
2341 case IEMMODE_16BIT:
2342 switch (pVCpu->iem.s.enmEffAddrMode)
2343 {
2344 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr16, false);
2345 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr32, false);
2346 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr64, false);
2347 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2348 }
2349 break;
2350 case IEMMODE_64BIT:
2351 case IEMMODE_32BIT:
2352 switch (pVCpu->iem.s.enmEffAddrMode)
2353 {
2354 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr16, false);
2355 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr32, false);
2356 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr64, false);
2357 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2358 }
2359 break;
2360 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2361 }
2362 }
2363}
2364
2365
/**
 * @opcode 0x6e
 *
 * OUTS/OUTSB - output byte(s) from DS:[e/r]SI (segment overridable, hence
 * iEffSeg is passed along) to port DX.  Defers to a C implementation
 * selected by REP prefix and effective address size.
 */
FNIEMOP_DEF(iemOp_outsb_Yb_DX)
{
    IEMOP_HLP_MIN_186();    /* instruction was introduced with the 80186 */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_outsb_DX_Yb, "rep outs DX,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            /* The 'false' argument presumably indicates I/O permission checks
               haven't been performed yet - TODO confirm in iemCImpl_rep_outs_*. */
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(outs_DX_Yb, "outs DX,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
2396
2397
/**
 * @opcode 0x6f
 *
 * OUTS/OUTSW/OUTSD - output word/dword(s) from DS:[e/r]SI (segment
 * overridable, hence iEffSeg is passed along) to port DX.  Defers to a
 * C implementation selected by REP prefix, effective operand size and
 * effective address size; 64-bit operand size uses the 32-bit variants
 * (there is no 64-bit OUTS).
 */
FNIEMOP_DEF(iemOp_outswd_Yv_DX)
{
    IEMOP_HLP_MIN_186();    /* instruction was introduced with the 80186 */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC(rep_outs_DX_Yv, "rep outs DX,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reached - all inner cases return */
            case IEMMODE_64BIT:
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reached - all inner cases return */
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(outs_DX_Yv, "outs DX,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reached - all inner cases return */
            case IEMMODE_64BIT:
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reached - all inner cases return */
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
2460
2461
/**
 * @opcode 0x70
 *
 * JO Jb - jump short (rel8) if the overflow flag (OF) is set.
 */
FNIEMOP_DEF(iemOp_jo_Jb)
{
    IEMOP_MNEMONIC(jo_Jb, "jo Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();              /* Jcc defaults to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2481
2482
/**
 * @opcode 0x71
 *
 * JNO Jb - jump short (rel8) if the overflow flag (OF) is clear.
 */
FNIEMOP_DEF(iemOp_jno_Jb)
{
    IEMOP_MNEMONIC(jno_Jb, "jno Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();              /* Jcc defaults to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {             /* inverted: fall through when OF is set */
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2502
/**
 * @opcode 0x72
 *
 * JC/JB/JNAE Jb - jump short (rel8) if the carry flag (CF) is set.
 */
FNIEMOP_DEF(iemOp_jc_Jb)
{
    IEMOP_MNEMONIC(jc_Jb, "jc/jnae Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();              /* Jcc defaults to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2522
2523
/**
 * @opcode 0x73
 *
 * JNC/JNB/JAE Jb - jump short (rel8) if the carry flag (CF) is clear.
 */
FNIEMOP_DEF(iemOp_jnc_Jb)
{
    IEMOP_MNEMONIC(jnc_Jb, "jnc/jnb Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();              /* Jcc defaults to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {             /* inverted: fall through when CF is set */
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2543
2544
/**
 * @opcode 0x74
 *
 * JE/JZ Jb - jump short (rel8) if the zero flag (ZF) is set.
 */
FNIEMOP_DEF(iemOp_je_Jb)
{
    IEMOP_MNEMONIC(je_Jb, "je/jz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();              /* Jcc defaults to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2564
2565
/**
 * @opcode 0x75
 *
 * JNE/JNZ Jb - jump short (rel8) if the zero flag (ZF) is clear.
 */
FNIEMOP_DEF(iemOp_jne_Jb)
{
    IEMOP_MNEMONIC(jne_Jb, "jne/jnz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();              /* Jcc defaults to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {             /* inverted: fall through when ZF is set */
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2585
2586
/**
 * @opcode 0x76
 *
 * JBE/JNA Jb - jump short (rel8) if CF or ZF is set (unsigned below-or-equal).
 */
FNIEMOP_DEF(iemOp_jbe_Jb)
{
    IEMOP_MNEMONIC(jbe_Jb, "jbe/jna Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();              /* Jcc defaults to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2606
2607
/**
 * @opcode 0x77
 *
 * JA/JNBE Jb - jump short (rel8) if both CF and ZF are clear (unsigned above).
 */
FNIEMOP_DEF(iemOp_jnbe_Jb)
{
    IEMOP_MNEMONIC(ja_Jb, "ja/jnbe Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();              /* Jcc defaults to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {   /* inverted: fall through when CF or ZF set */
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2627
2628
/**
 * @opcode 0x78
 *
 * JS Jb - jump short (rel8) if the sign flag (SF) is set.
 */
FNIEMOP_DEF(iemOp_js_Jb)
{
    IEMOP_MNEMONIC(js_Jb, "js Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();              /* Jcc defaults to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2648
2649
/**
 * @opcode 0x79
 *
 * JNS Jb - jump short (rel8) if the sign flag (SF) is clear.
 */
FNIEMOP_DEF(iemOp_jns_Jb)
{
    IEMOP_MNEMONIC(jns_Jb, "jns Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();              /* Jcc defaults to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {             /* inverted: fall through when SF is set */
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2669
2670
/**
 * @opcode 0x7a
 *
 * JP/JPE Jb - jump short (rel8) if the parity flag (PF) is set.
 */
FNIEMOP_DEF(iemOp_jp_Jb)
{
    IEMOP_MNEMONIC(jp_Jb, "jp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();              /* Jcc defaults to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2690
2691
/**
 * @opcode 0x7b
 *
 * JNP/JPO Jb - jump short (rel8) if the parity flag (PF) is clear.
 */
FNIEMOP_DEF(iemOp_jnp_Jb)
{
    IEMOP_MNEMONIC(jnp_Jb, "jnp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();              /* Jcc defaults to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {             /* inverted: fall through when PF is set */
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2711
2712
/**
 * @opcode 0x7c
 *
 * JL/JNGE Jb - jump short (rel8) if SF != OF (signed less).
 */
FNIEMOP_DEF(iemOp_jl_Jb)
{
    IEMOP_MNEMONIC(jl_Jb, "jl/jnge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();              /* Jcc defaults to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2732
2733
/**
 * @opcode 0x7d
 *
 * JNL/JGE Jb - jump short (rel8) if SF == OF (signed greater-or-equal).
 */
FNIEMOP_DEF(iemOp_jnl_Jb)
{
    IEMOP_MNEMONIC(jge_Jb, "jnl/jge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();              /* Jcc defaults to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) { /* inverted: fall through when SF != OF */
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2753
2754
/**
 * @opcode 0x7e
 *
 * JLE/JNG Jb - jump short (rel8) if ZF is set or SF != OF (signed
 * less-or-equal).
 */
FNIEMOP_DEF(iemOp_jle_Jb)
{
    IEMOP_MNEMONIC(jle_Jb, "jle/jng Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();              /* Jcc defaults to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2774
2775
/**
 * @opcode 0x7f
 *
 * JG/JNLE Jb - jump short (rel8) if ZF is clear and SF == OF (signed
 * greater).
 */
FNIEMOP_DEF(iemOp_jnle_Jb)
{
    IEMOP_MNEMONIC(jg_Jb, "jnle/jg Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();              /* Jcc defaults to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {  /* inverted condition */
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2795
2796
/**
 * @opcode 0x80
 *
 * Group 1: add/or/adc/sbb/and/sub/xor/cmp Eb,Ib.  The actual operation is
 * selected by the ModR/M reg field via the g_apIemImplGrp1 function table.
 */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_80)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: IEMOP_MNEMONIC(add_Eb_Ib, "add Eb,Ib"); break;
        case 1: IEMOP_MNEMONIC(or_Eb_Ib,  "or  Eb,Ib"); break;
        case 2: IEMOP_MNEMONIC(adc_Eb_Ib, "adc Eb,Ib"); break;
        case 3: IEMOP_MNEMONIC(sbb_Eb_Ib, "sbb Eb,Ib"); break;
        case 4: IEMOP_MNEMONIC(and_Eb_Ib, "and Eb,Ib"); break;
        case 5: IEMOP_MNEMONIC(sub_Eb_Ib, "sub Eb,Ib"); break;
        case 6: IEMOP_MNEMONIC(xor_Eb_Ib, "xor Eb,Ib"); break;
        case 7: IEMOP_MNEMONIC(cmp_Eb_Ib, "cmp Eb,Ib"); break;
    }
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,       pu8Dst,                 0);
        IEM_MC_ARG_CONST(uint8_t,   u8Src, /*=*/ u8Imm,     1);
        IEM_MC_ARG(uint32_t *,      pEFlags,                2);

        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        /* CMP has no locked variant (it doesn't write), so map the memory
           operand read-only for it and read-write for everything else. */
        uint32_t fAccess;
        if (pImpl->pfnLockedU8)
            fAccess = IEM_ACCESS_DATA_RW;
        else /* CMP */
            fAccess = IEM_ACCESS_DATA_R;
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,       pu8Dst,                 0);
        IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2);
        IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);   /* 1 = immediate byte still to come */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_ARG_CONST(uint8_t,   u8Src, /*=*/ u8Imm,     1);
        /* LOCK prefix is only acceptable for the read-modify-write forms. */
        if (pImpl->pfnLockedU8)
            IEMOP_HLP_DONE_DECODING();
        else
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_MEM_MAP(pu8Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2868
2869
/**
 * @opcode 0x81
 *
 * Group 1: add/or/adc/sbb/and/sub/xor/cmp Ev,Iz.  The operation is selected
 * by the ModR/M reg field via g_apIemImplGrp1.  In 64-bit operand size the
 * immediate is a 32-bit value sign-extended to 64 bits (per the
 * IEM_OPCODE_GET_NEXT_S32_SX_U64 fetcher).
 */
FNIEMOP_DEF(iemOp_Grp1_Ev_Iz)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: IEMOP_MNEMONIC(add_Ev_Iz, "add Ev,Iz"); break;
        case 1: IEMOP_MNEMONIC(or_Ev_Iz,  "or  Ev,Iz"); break;
        case 2: IEMOP_MNEMONIC(adc_Ev_Iz, "adc Ev,Iz"); break;
        case 3: IEMOP_MNEMONIC(sbb_Ev_Iz, "sbb Ev,Iz"); break;
        case 4: IEMOP_MNEMONIC(and_Ev_Iz, "and Ev,Iz"); break;
        case 5: IEMOP_MNEMONIC(sub_Ev_Iz, "sub Ev,Iz"); break;
        case 6: IEMOP_MNEMONIC(xor_Ev_Iz, "xor Ev,Iz"); break;
        case 7: IEMOP_MNEMONIC(cmp_Ev_Iz, "cmp Ev,Iz"); break;
    }
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register target */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                0);
                IEM_MC_ARG_CONST(uint16_t,  u16Src, /*=*/ u16Imm,   1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                /* Operations without a locked variant (CMP/TEST) don't write,
                   so the operand is mapped read-only; the rest read-modify-write. */
                uint32_t fAccess;
                if (pImpl->pfnLockedU16)
                    fAccess = IEM_ACCESS_DATA_RW;
                else /* CMP, TEST */
                    fAccess = IEM_ACCESS_DATA_R;
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                0);
                IEM_MC_ARG(uint16_t,        u16Src,                 1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);   /* 2 immediate bytes follow */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_32BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register target */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                0);
                IEM_MC_ARG_CONST(uint32_t,  u32Src, /*=*/ u32Imm,   1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /* 32-bit register write zeroes the high half */

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                uint32_t fAccess;
                if (pImpl->pfnLockedU32)
                    fAccess = IEM_ACCESS_DATA_RW;
                else /* CMP, TEST */
                    fAccess = IEM_ACCESS_DATA_R;
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                0);
                IEM_MC_ARG(uint32_t,        u32Src,                 1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);   /* 4 immediate bytes follow */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                if (pImpl->pfnLockedU32)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_64BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register target */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);   /* imm32 sign-extended to 64-bit */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                0);
                IEM_MC_ARG_CONST(uint64_t,  u64Src, /*=*/ u64Imm,   1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                uint32_t fAccess;
                if (pImpl->pfnLockedU64)
                    fAccess = IEM_ACCESS_DATA_RW;
                else /* CMP */
                    fAccess = IEM_ACCESS_DATA_R;
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                0);
                IEM_MC_ARG(uint64_t,        u64Src,                 1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);   /* 4 immediate bytes follow (imm32) */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                if (pImpl->pfnLockedU64)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            break;
        }
    }
    return VINF_SUCCESS;
}
3059
3060
/**
 * @opcode 0x82
 * @opmnemonic grp1_82
 * @opgroup op_groups
 *
 * Alias of 0x80 (Grp1 Eb,Ib) that is only valid outside 64-bit mode;
 * otherwise identical, so it forwards to the 0x80 handler.
 */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_82)
{
    IEMOP_HLP_NO_64BIT(); /** @todo do we need to decode the whole instruction or is this ok? */
    return FNIEMOP_CALL(iemOp_Grp1_Eb_Ib_80);
}
3071
3072
/**
 * @opcode 0x83
 *
 * Group 1: add/or/adc/sbb/and/sub/xor/cmp Ev,Ib - the 8-bit immediate is
 * sign-extended to the effective operand size.  Operation selected by the
 * ModR/M reg field via g_apIemImplGrp1.
 */
FNIEMOP_DEF(iemOp_Grp1_Ev_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: IEMOP_MNEMONIC(add_Ev_Ib, "add Ev,Ib"); break;
        case 1: IEMOP_MNEMONIC(or_Ev_Ib,  "or  Ev,Ib"); break;
        case 2: IEMOP_MNEMONIC(adc_Ev_Ib, "adc Ev,Ib"); break;
        case 3: IEMOP_MNEMONIC(sbb_Ev_Ib, "sbb Ev,Ib"); break;
        case 4: IEMOP_MNEMONIC(and_Ev_Ib, "and Ev,Ib"); break;
        case 5: IEMOP_MNEMONIC(sub_Ev_Ib, "sub Ev,Ib"); break;
        case 6: IEMOP_MNEMONIC(xor_Ev_Ib, "xor Ev,Ib"); break;
        case 7: IEMOP_MNEMONIC(cmp_Ev_Ib, "cmp Ev,Ib"); break;
    }
    /* Note! Seems the OR, AND, and XOR instructions are present on CPUs prior
             to the 386 even if absent in the intel reference manuals and some
             3rd party opcode listings. */
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register target
         */
        /* NOTE(review): the done-decoding marker precedes the immediate fetch
           here, unlike the 0x80/0x81 register paths - confirm intentional. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                    0);
                IEM_MC_ARG_CONST(uint16_t,  u16Src, /*=*/ (int8_t)u8Imm,1); /* sign-extend imm8 */
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                    0);
                IEM_MC_ARG_CONST(uint32_t,  u32Src, /*=*/ (int8_t)u8Imm,1); /* sign-extend imm8 */
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /* 32-bit register write zeroes the high half */

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                    0);
                IEM_MC_ARG_CONST(uint64_t,  u64Src, /*=*/ (int8_t)u8Imm,1); /* sign-extend imm8 */
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }
        }
    }
    else
    {
        /*
         * Memory target.
         */
        /* CMP has no locked variant (it doesn't write): map read-only for it,
           read-write for everything else. */
        uint32_t fAccess;
        if (pImpl->pfnLockedU16)
            fAccess = IEM_ACCESS_DATA_RW;
        else /* CMP */
            fAccess = IEM_ACCESS_DATA_R;

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                    0);
                IEM_MC_ARG(uint16_t,        u16Src,                     1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,            2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);   /* 1 immediate byte follows */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm);           /* sign-extend imm8 */
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                    0);
                IEM_MC_ARG(uint32_t,        u32Src,                     1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,            2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);   /* 1 immediate byte follows */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm);           /* sign-extend imm8 */
                if (pImpl->pfnLockedU32)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                    0);
                IEM_MC_ARG(uint64_t,        u64Src,                     1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,            2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);   /* 1 immediate byte follows */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm);           /* sign-extend imm8 */
                if (pImpl->pfnLockedU64)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }
        }
    }
    return VINF_SUCCESS;
}
3257
3258
/**
 * @opcode 0x84
 *
 * TEST Eb,Gb - AND without storing the result; reuses the generic
 * rm,r8 binary-operator decoder with the TEST implementation table.
 */
FNIEMOP_DEF(iemOp_test_Eb_Gb)
{
    IEMOP_MNEMONIC(test_Eb_Gb, "test Eb,Gb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);    /* AF is undefined after TEST */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_test);
}
3268
3269
/**
 * @opcode 0x85
 *
 * TEST Ev,Gv - AND without storing the result; reuses the generic
 * rm,rv binary-operator decoder with the TEST implementation table.
 */
FNIEMOP_DEF(iemOp_test_Ev_Gv)
{
    IEMOP_MNEMONIC(test_Ev_Gv, "test Ev,Gv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);    /* AF is undefined after TEST */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_test);
}
3279
3280
/**
 * @opcode 0x86
 *
 * XCHG Eb,Gb - exchange a byte register with another register or memory.
 * The register-register form swaps via two temporaries; the memory form maps
 * the operand and calls the iemAImpl_xchg_u8 worker.  NOTE(review): no LOCK
 * prefix check in the memory path - presumably handled implicitly, confirm.
 */
FNIEMOP_DEF(iemOp_xchg_Eb_Gb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC(xchg_Eb_Gb, "xchg Eb,Gb");

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, uTmp1);
        IEM_MC_LOCAL(uint8_t, uTmp2);

        /* Fetch both, then store crosswise. */
        IEM_MC_FETCH_GREG_U8(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_FETCH_GREG_U8(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
/** @todo the register must be committed separately! */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint8_t *,  pu8Mem, 0);
        IEM_MC_ARG(uint8_t *,  pu8Reg, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8, pu8Mem, pu8Reg);
        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Mem, IEM_ACCESS_DATA_RW);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3330
3331
/**
 * @opcode 0x87
 *
 * XCHG Ev,Gv - exchange a word/dword/qword general register or memory operand
 * with a general register.  Register-to-register swaps are done with a pair of
 * fetches and cross-stores; the memory form maps the location read/write and
 * calls the iemAImpl_xchg_uXX assembly worker.
 *
 * NOTE(review): on real CPUs the memory form is implicitly atomic regardless
 * of a LOCK prefix - presumably the RW mapping plus the assembly worker
 * provides that here; confirm against the worker implementation.
 */
FNIEMOP_DEF(iemOp_xchg_Ev_Gv)
{
    IEMOP_MNEMONIC(xchg_Ev_Gv, "xchg Ev,Gv");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* LOCK with a register operand is invalid. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                /* Fetch both registers, then store each into the other. */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, uTmp1);
                IEM_MC_LOCAL(uint16_t, uTmp2);

                IEM_MC_FETCH_GREG_U16(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, uTmp1);
                IEM_MC_LOCAL(uint32_t, uTmp2);

                IEM_MC_FETCH_GREG_U32(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U32(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, uTmp1);
                IEM_MC_LOCAL(uint64_t, uTmp2);

                IEM_MC_FETCH_GREG_U64(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U64(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
/** @todo the register must be committed separately! */
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint16_t *, pu16Mem, 0);
                IEM_MC_ARG(uint16_t *, pu16Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu16Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16, pu16Mem, pu16Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Mem, IEM_ACCESS_DATA_RW);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint32_t *, pu32Mem, 0);
                IEM_MC_ARG(uint32_t *, pu32Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu32Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32, pu32Mem, pu32Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Mem, IEM_ACCESS_DATA_RW);

                /* The worker wrote the register thru a pointer, so the upper
                   half of the 64-bit register must be zeroed explicitly here. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint64_t *, pu64Mem, 0);
                IEM_MC_ARG(uint64_t *, pu64Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu64Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64, pu64Mem, pu64Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Mem, IEM_ACCESS_DATA_RW);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
3455
3456
/**
 * @opcode 0x88
 *
 * MOV Eb,Gb - store a byte general register into a register or memory
 * destination.  No flags are modified.
 */
FNIEMOP_DEF(iemOp_mov_Eb_Gb)
{
    IEMOP_MNEMONIC(mov_Eb_Gb, "mov Eb,Gb");

    uint8_t bRm;
    IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        /* Effective address first: it may consume further opcode bytes
           (SIB/displacement) before decoding is declared done. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;

}
3498
3499
/**
 * @opcode 0x89
 *
 * MOV Ev,Gv - store a word/dword/qword general register into a register or
 * memory destination, sized by the effective operand size.
 */
FNIEMOP_DEF(iemOp_mov_Ev_Gv)
{
    IEMOP_MNEMONIC(mov_Ev_Gv, "mov Ev,Gv");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
3591
3592
/**
 * @opcode 0x8a
 *
 * MOV Gb,Eb - load a byte general register from a register or memory source.
 */
FNIEMOP_DEF(iemOp_mov_Gb_Eb)
{
    IEMOP_MNEMONIC(mov_Gb_Eb, "mov Gb,Eb");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3632
3633
/**
 * @opcode 0x8b
 *
 * MOV Gv,Ev - load a word/dword/qword general register from a register or
 * memory source, sized by the effective operand size.
 */
FNIEMOP_DEF(iemOp_mov_Gv_Ev)
{
    IEMOP_MNEMONIC(mov_Gv_Ev, "mov Gv,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
3725
3726
3727/**
3728 * opcode 0x63
3729 * @todo Table fixme
3730 */
3731FNIEMOP_DEF(iemOp_arpl_Ew_Gw_movsx_Gv_Ev)
3732{
3733 if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
3734 return FNIEMOP_CALL(iemOp_arpl_Ew_Gw);
3735 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
3736 return FNIEMOP_CALL(iemOp_mov_Gv_Ev);
3737 return FNIEMOP_CALL(iemOp_movsxd_Gv_Ev);
3738}
3739
3740
/**
 * @opcode 0x8c
 *
 * MOV Ev,Sw - store a segment register into a general register or memory.
 */
FNIEMOP_DEF(iemOp_mov_Ev_Sw)
{
    IEMOP_MNEMONIC(mov_Ev_Sw, "mov Ev,Sw");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * Check that the source segment register exists (reg field values above
     * GS are invalid).  The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
    if (   iSegReg > X86_SREG_GS)
        return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     * In that case, the operand size is respected and the upper bits are
     * cleared (starting with some pentium).
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                /* Selector zero-extended into the 32-bit destination. */
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_SREG_ZX_U32(u32Value, iSegReg);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                /* Selector zero-extended into the 64-bit destination. */
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_SREG_ZX_U64(u64Value, iSegReg);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're saving the register to memory.  The access is word sized
         * regardless of operand size prefixes.
         */
#if 0 /* not necessary */
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
#endif
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t,  u16Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3816
3817
3818
3819
/**
 * @opcode 0x8d
 *
 * LEA Gv,M - store the effective address of the memory operand in a general
 * register, truncated to the effective operand size.  The register form of
 * ModR/M is invalid (\#UD); memory is never accessed.
 */
FNIEMOP_DEF(iemOp_lea_Gv_M)
{
    IEMOP_MNEMONIC(lea_Gv_M, "lea Gv,M");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE(); /* no register form */

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            /* Truncate the (possibly wider) address to 16 bits. */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint16_t, u16Cast);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ASSIGN_TO_SMALLER(u16Cast, GCPtrEffSrc);
            IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Cast);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            /* Truncate the (possibly wider) address to 32 bits. */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Cast);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ASSIGN_TO_SMALLER(u32Cast, GCPtrEffSrc);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Cast);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, GCPtrEffSrc);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
    }
    AssertFailedReturn(VERR_IEM_IPE_7);
}
3868
3869
/**
 * @opcode 0x8e
 *
 * MOV Sw,Ev - load a segment register from a general register or memory.
 * The actual segment loading (descriptor checks, \#GP/\#NP etc.) is deferred
 * to the iemCImpl_load_SReg C implementation.
 */
FNIEMOP_DEF(iemOp_mov_Sw_Ev)
{
    IEMOP_MNEMONIC(mov_Sw_Ev, "mov Sw,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * The practical operand size is 16-bit.
     */
#if 0 /* not necessary */
    pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
#endif

    /*
     * Check that the destination register exists and can be used with this
     * instruction.  Loading CS is invalid, as are reg values above GS.
     * The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
    if (   iSegReg == X86_SREG_CS
        || iSegReg > X86_SREG_GS)
        return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
        IEM_MC_ARG(uint16_t,      u16Value,          1);
        IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading the register from memory.  The access is word sized
         * regardless of operand size prefixes.
         */
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
        IEM_MC_ARG(uint16_t,      u16Value,          1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3926
3927
/** Opcode 0x8f /0.
 *
 * POP Ev - pop a word/dword/qword off the stack into a register or memory
 * operand.  The memory form needs special care: per Intel, RSP is
 * incremented *before* it participates in the effective address
 * calculation, hence the decode-twice approach below.
 */
FNIEMOP_DEF_1(iemOp_pop_Ev, uint8_t, bRm)
{
    /* This bugger is rather annoying as it requires rSP to be updated before
       doing the effective address calculations.  Will eventually require a
       split between the R/M+SIB decoding and the effective address
       calculation - which is something that is required for any attempt at
       reusing this code for a recompiler.  It may also be good to have if we
       need to delay #UD exception caused by invalid lock prefixes.

       For now, we'll do a mostly safe interpreter-only implementation here. */
    /** @todo What's the deal with the 'reg' field and pop Ev?  Ignorning it for
     *        now until tests show it's checked.. */
    IEMOP_MNEMONIC(pop_Ev, "pop Ev");

    /* Register access is relatively easy and can share code. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(iemOpCommonPopGReg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);

    /*
     * Memory target.
     *
     * Intel says that RSP is incremented before it's used in any effective
     * address calcuations.  This means some serious extra annoyance here since
     * we decode and calculate the effective address in one step and like to
     * delay committing registers till everything is done.
     *
     * So, we'll decode and calculate the effective address twice.  This will
     * require some recoding if turned into a recompiler.
     */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* The common code does this differently. */

#ifndef TST_IEM_CHECK_MC
    /* Calc effective address with modified ESP.  The last argument to
       iemOpHlpCalcRmEffAddrEx is the rSP bias (the operand size in bytes). */
/** @todo testcase */
    PCPUMCTX        pCtx = IEM_GET_CTX(pVCpu);
    RTGCPTR         GCPtrEff;
    VBOXSTRICTRC    rcStrict;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 2); break;
        case IEMMODE_32BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 4); break;
        case IEMMODE_64BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 8); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict != VINF_SUCCESS)
        return rcStrict;
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* Perform the operation - this should be CImpl.  rSP is only committed
       on success, so a faulting store leaves the stack pointer untouched. */
    RTUINT64U TmpRsp;
    TmpRsp.u = pCtx->rsp;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Value;
            rcStrict = iemMemStackPopU16Ex(pVCpu, &u16Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU16(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u16Value);
            break;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Value;
            rcStrict = iemMemStackPopU32Ex(pVCpu, &u32Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU32(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u32Value);
            break;
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Value;
            rcStrict = iemMemStackPopU64Ex(pVCpu, &u64Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU64(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u64Value);
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict == VINF_SUCCESS)
    {
        pCtx->rsp = TmpRsp.u;
        iemRegUpdateRipAndClearRF(pVCpu);
    }
    return rcStrict;

#else
    return VERR_IEM_IPE_2;
#endif
}
4022
4023
4024/**
4025 * @opcode 0x8f
4026 */
4027FNIEMOP_DEF(iemOp_Grp1A__xop)
4028{
4029 /*
4030 * AMD has defined /1 thru /7 as XOP prefix. The prefix is similar to the
4031 * three byte VEX prefix, except that the mmmmm field cannot have the values
4032 * 0 thru 7, because it would then be confused with pop Ev (modrm.reg == 0).
4033 */
4034 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4035 if ((bRm & X86_MODRM_REG_MASK) == (0 << X86_MODRM_REG_SHIFT)) /* /0 */
4036 return FNIEMOP_CALL_1(iemOp_pop_Ev, bRm);
4037
4038 IEMOP_MNEMONIC(xop, "xop");
4039 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXop)
4040 {
4041 /** @todo Test when exctly the XOP conformance checks kick in during
4042 * instruction decoding and fetching (using \#PF). */
4043 uint8_t bXop2; IEM_OPCODE_GET_NEXT_U8(&bXop2);
4044 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
4045 if ( ( pVCpu->iem.s.fPrefixes
4046 & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_LOCK | IEM_OP_PRF_REX))
4047 == 0)
4048 {
4049 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_XOP;
4050 if (bXop2 & 0x80 /* XOP.W */)
4051 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
4052 pVCpu->iem.s.uRexReg = ~bRm >> (7 - 3);
4053 pVCpu->iem.s.uRexIndex = ~bRm >> (6 - 3);
4054 pVCpu->iem.s.uRexB = ~bRm >> (5 - 3);
4055 pVCpu->iem.s.uVex3rdReg = (~bXop2 >> 3) & 0xf;
4056 pVCpu->iem.s.uVexLength = (bXop2 >> 2) & 1;
4057 pVCpu->iem.s.idxPrefix = bXop2 & 0x3;
4058
4059 /** @todo XOP: Just use new tables and decoders. */
4060 switch (bRm & 0x1f)
4061 {
4062 case 8: /* xop opcode map 8. */
4063 IEMOP_BITCH_ABOUT_STUB();
4064 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
4065
4066 case 9: /* xop opcode map 9. */
4067 IEMOP_BITCH_ABOUT_STUB();
4068 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
4069
4070 case 10: /* xop opcode map 10. */
4071 IEMOP_BITCH_ABOUT_STUB();
4072 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
4073
4074 default:
4075 Log(("XOP: Invalid vvvv value: %#x!\n", bRm & 0x1f));
4076 return IEMOP_RAISE_INVALID_OPCODE();
4077 }
4078 }
4079 else
4080 Log(("XOP: Invalid prefix mix!\n"));
4081 }
4082 else
4083 Log(("XOP: XOP support disabled!\n"));
4084 return IEMOP_RAISE_INVALID_OPCODE();
4085}
4086
4087
/**
 * Common 'xchg reg,rAX' helper.
 *
 * Implements the single-byte 0x90+reg XCHG forms: swaps the given general
 * register (after merging in REX.B) with the accumulator at the effective
 * operand size.
 *
 * @param   iReg    Low three bits of the register index; REX.B is OR'ed in
 *                  here before use.
 */
FNIEMOP_DEF_1(iemOpCommonXchgGRegRax, uint8_t, iReg)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    iReg |= pVCpu->iem.s.uRexB;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint16_t, u16Tmp1);
            IEM_MC_LOCAL(uint16_t, u16Tmp2);
            IEM_MC_FETCH_GREG_U16(u16Tmp1, iReg);
            IEM_MC_FETCH_GREG_U16(u16Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp1);
            IEM_MC_STORE_GREG_U16(iReg, u16Tmp2);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Tmp1);
            IEM_MC_LOCAL(uint32_t, u32Tmp2);
            IEM_MC_FETCH_GREG_U32(u32Tmp1, iReg);
            IEM_MC_FETCH_GREG_U32(u32Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp1);
            IEM_MC_STORE_GREG_U32(iReg, u32Tmp2);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Tmp1);
            IEM_MC_LOCAL(uint64_t, u64Tmp2);
            IEM_MC_FETCH_GREG_U64(u64Tmp1, iReg);
            IEM_MC_FETCH_GREG_U64(u64Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp1);
            IEM_MC_STORE_GREG_U64(iReg, u64Tmp2);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
4137
4138
4139/**
4140 * @opcode 0x90
4141 */
4142FNIEMOP_DEF(iemOp_nop)
4143{
4144 /* R8/R8D and RAX/EAX can be exchanged. */
4145 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_B)
4146 {
4147 IEMOP_MNEMONIC(xchg_r8_rAX, "xchg r8,rAX");
4148 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xAX);
4149 }
4150
4151 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
4152 IEMOP_MNEMONIC(pause, "pause");
4153 else
4154 IEMOP_MNEMONIC(nop, "nop");
4155 IEM_MC_BEGIN(0, 0);
4156 IEM_MC_ADVANCE_RIP();
4157 IEM_MC_END();
4158 return VINF_SUCCESS;
4159}
4160
4161
/**
 * @opcode 0x91
 *
 * XCHG rCX,rAX (or r9,rAX with REX.B - merged in by the common helper).
 */
FNIEMOP_DEF(iemOp_xchg_eCX_eAX)
{
    IEMOP_MNEMONIC(xchg_rCX_rAX, "xchg rCX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xCX);
}
4170
4171
/**
 * @opcode 0x92
 *
 * XCHG rDX,rAX (or r10,rAX with REX.B - merged in by the common helper).
 */
FNIEMOP_DEF(iemOp_xchg_eDX_eAX)
{
    IEMOP_MNEMONIC(xchg_rDX_rAX, "xchg rDX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDX);
}
4180
4181
/**
 * @opcode 0x93
 *
 * XCHG rBX,rAX (or r11,rAX with REX.B - merged in by the common helper).
 */
FNIEMOP_DEF(iemOp_xchg_eBX_eAX)
{
    IEMOP_MNEMONIC(xchg_rBX_rAX, "xchg rBX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBX);
}
4190
4191
4192/**
4193 * @opcode 0x94
4194 */
4195FNIEMOP_DEF(iemOp_xchg_eSP_eAX)
4196{
4197 IEMOP_MNEMONIC(xchg_rSX_rAX, "xchg rSX,rAX");
4198 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSP);
4199}
4200
4201
/**
 * @opcode 0x95
 *
 * XCHG rBP,rAX (or r13,rAX with REX.B - merged in by the common helper).
 */
FNIEMOP_DEF(iemOp_xchg_eBP_eAX)
{
    IEMOP_MNEMONIC(xchg_rBP_rAX, "xchg rBP,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBP);
}
4210
4211
/**
 * @opcode 0x96
 *
 * XCHG rSI,rAX (or r14,rAX with REX.B - merged in by the common helper).
 */
FNIEMOP_DEF(iemOp_xchg_eSI_eAX)
{
    IEMOP_MNEMONIC(xchg_rSI_rAX, "xchg rSI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSI);
}
4220
4221
/**
 * @opcode 0x97
 *
 * XCHG rDI,rAX (or r15,rAX with REX.B - merged in by the common helper).
 */
FNIEMOP_DEF(iemOp_xchg_eDI_eAX)
{
    IEMOP_MNEMONIC(xchg_rDI_rAX, "xchg rDI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDI);
}
4230
4231
/**
 * @opcode 0x98
 *
 * CBW/CWDE/CDQE - sign-extend AL into AX, AX into EAX, or EAX into RAX,
 * depending on the effective operand size.  Implemented by testing the
 * source's top bit and OR'ing in (negative) or AND'ing away (positive)
 * the upper half mask.  No flags are modified.
 */
FNIEMOP_DEF(iemOp_cbw)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEMOP_MNEMONIC(cbw, "cbw");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 7) {
                IEM_MC_OR_GREG_U16(X86_GREG_xAX, UINT16_C(0xff00));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U16(X86_GREG_xAX, UINT16_C(0x00ff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEMOP_MNEMONIC(cwde, "cwde");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
                IEM_MC_OR_GREG_U32(X86_GREG_xAX, UINT32_C(0xffff0000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U32(X86_GREG_xAX, UINT32_C(0x0000ffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEMOP_MNEMONIC(cdqe, "cdqe");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
                IEM_MC_OR_GREG_U64(X86_GREG_xAX, UINT64_C(0xffffffff00000000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U64(X86_GREG_xAX, UINT64_C(0x00000000ffffffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
4279
4280
/**
 * @opcode 0x99
 *
 * CWD/CDQ/CQO - replicate the sign bit of the accumulator (AX/EAX/RAX) into
 * DX/EDX/RDX, storing all-ones or zero.  No flags are modified.
 */
FNIEMOP_DEF(iemOp_cwd)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEMOP_MNEMONIC(cwd, "cwd");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, UINT16_C(0xffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEMOP_MNEMONIC(cdq, "cdq");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, UINT32_C(0xffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEMOP_MNEMONIC(cqo, "cqo");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 63) {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, UINT64_C(0xffffffffffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
4328
4329
/**
 * @opcode 0x9a
 *
 * CALL Ap - far call to an absolute ptr16:16 / ptr16:32 immediate.
 * Invalid in 64-bit mode (IEMOP_HLP_NO_64BIT).  The heavy lifting
 * (stack pushes, CS load and checks) is done by iemCImpl_callf.
 */
FNIEMOP_DEF(iemOp_call_Ap)
{
    IEMOP_MNEMONIC(call_Ap, "call Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    uint32_t offSeg;
    if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&offSeg);
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
    uint16_t uSel;  IEM_OPCODE_GET_NEXT_U16(&uSel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_callf, uSel, offSeg, pVCpu->iem.s.enmEffOpSize);
}
4348
4349
/** Opcode 0x9b. (aka fwait)
 *
 * WAIT/FWAIT - raises \#NM or a pending FPU exception when applicable,
 * otherwise behaves as a no-op.
 */
FNIEMOP_DEF(iemOp_wait)
{
    IEMOP_MNEMONIC(wait, "wait");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_WAIT_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
4363
4364
/**
 * @opcode 0x9c
 *
 * PUSHF/PUSHFD/PUSHFQ - push EFLAGS; deferred to iemCImpl_pushf.  The
 * default operand size is 64-bit in long mode.
 */
FNIEMOP_DEF(iemOp_pushf_Fv)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_pushf, pVCpu->iem.s.enmEffOpSize);
}
4374
4375
/**
 * @opcode 0x9d
 *
 * POPF/POPFD/POPFQ - pop EFLAGS; deferred to iemCImpl_popf (which handles
 * the IOPL/VM86 restrictions).  The default operand size is 64-bit in long
 * mode.
 */
FNIEMOP_DEF(iemOp_popf_Fv)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_popf, pVCpu->iem.s.enmEffOpSize);
}
4385
4386
/**
 * @opcode 0x9e
 *
 * SAHF - store AH into the low byte of EFLAGS (SF/ZF/AF/PF/CF); the
 * reserved bit 1 is forced to one.  In 64-bit mode this requires the
 * LAHF/SAHF CPUID feature, otherwise \#UD.
 */
FNIEMOP_DEF(iemOp_sahf)
{
    IEMOP_MNEMONIC(sahf, "sahf");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (   pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
        && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
        return IEMOP_RAISE_INVALID_OPCODE();
    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint32_t, u32Flags);
    IEM_MC_LOCAL(uint32_t, EFlags);
    IEM_MC_FETCH_EFLAGS(EFlags);
    IEM_MC_FETCH_GREG_U8_ZX_U32(u32Flags, X86_GREG_xSP/*=AH*/);
    /* Keep only the arithmetic flags from AH, wipe the low byte of the
       current EFLAGS, force reserved bit 1, then merge. */
    IEM_MC_AND_LOCAL_U32(u32Flags, X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    IEM_MC_AND_LOCAL_U32(EFlags, UINT32_C(0xffffff00));
    IEM_MC_OR_LOCAL_U32(u32Flags, X86_EFL_1);
    IEM_MC_OR_2LOCS_U32(EFlags, u32Flags);
    IEM_MC_COMMIT_EFLAGS(EFlags);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
4411
4412
4413/**
4414 * @opcode 0x9f
4415 */
4416FNIEMOP_DEF(iemOp_lahf)
4417{
4418 IEMOP_MNEMONIC(lahf, "lahf");
4419 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4420 if ( pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
4421 && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
4422 return IEMOP_RAISE_INVALID_OPCODE();
4423 IEM_MC_BEGIN(0, 1);
4424 IEM_MC_LOCAL(uint8_t, u8Flags);
4425 IEM_MC_FETCH_EFLAGS_U8(u8Flags);
4426 IEM_MC_STORE_GREG_U8(X86_GREG_xSP/*=AH*/, u8Flags);
4427 IEM_MC_ADVANCE_RIP();
4428 IEM_MC_END();
4429 return VINF_SUCCESS;
4430}
4431
4432
4433/**
4434 * Macro used by iemOp_mov_AL_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL and
4435 * iemOp_mov_Ov_rAX to fetch the moffsXX bit of the opcode and fend of lock
4436 * prefixes. Will return on failures.
4437 * @param a_GCPtrMemOff The variable to store the offset in.
4438 */
4439#define IEMOP_FETCH_MOFFS_XX(a_GCPtrMemOff) \
4440 do \
4441 { \
4442 switch (pVCpu->iem.s.enmEffAddrMode) \
4443 { \
4444 case IEMMODE_16BIT: \
4445 IEM_OPCODE_GET_NEXT_U16_ZX_U64(&(a_GCPtrMemOff)); \
4446 break; \
4447 case IEMMODE_32BIT: \
4448 IEM_OPCODE_GET_NEXT_U32_ZX_U64(&(a_GCPtrMemOff)); \
4449 break; \
4450 case IEMMODE_64BIT: \
4451 IEM_OPCODE_GET_NEXT_U64(&(a_GCPtrMemOff)); \
4452 break; \
4453 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4454 } \
4455 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4456 } while (0)
4457
4458/**
4459 * @opcode 0xa0
4460 */
4461FNIEMOP_DEF(iemOp_mov_AL_Ob)
4462{
4463 /*
4464 * Get the offset and fend of lock prefixes.
4465 */
4466 RTGCPTR GCPtrMemOff;
4467 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
4468
4469 /*
4470 * Fetch AL.
4471 */
4472 IEM_MC_BEGIN(0,1);
4473 IEM_MC_LOCAL(uint8_t, u8Tmp);
4474 IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
4475 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
4476 IEM_MC_ADVANCE_RIP();
4477 IEM_MC_END();
4478 return VINF_SUCCESS;
4479}
4480
4481
4482/**
4483 * @opcode 0xa1
4484 */
4485FNIEMOP_DEF(iemOp_mov_rAX_Ov)
4486{
4487 /*
4488 * Get the offset and fend of lock prefixes.
4489 */
4490 IEMOP_MNEMONIC(mov_rAX_Ov, "mov rAX,Ov");
4491 RTGCPTR GCPtrMemOff;
4492 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
4493
4494 /*
4495 * Fetch rAX.
4496 */
4497 switch (pVCpu->iem.s.enmEffOpSize)
4498 {
4499 case IEMMODE_16BIT:
4500 IEM_MC_BEGIN(0,1);
4501 IEM_MC_LOCAL(uint16_t, u16Tmp);
4502 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
4503 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
4504 IEM_MC_ADVANCE_RIP();
4505 IEM_MC_END();
4506 return VINF_SUCCESS;
4507
4508 case IEMMODE_32BIT:
4509 IEM_MC_BEGIN(0,1);
4510 IEM_MC_LOCAL(uint32_t, u32Tmp);
4511 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
4512 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp);
4513 IEM_MC_ADVANCE_RIP();
4514 IEM_MC_END();
4515 return VINF_SUCCESS;
4516
4517 case IEMMODE_64BIT:
4518 IEM_MC_BEGIN(0,1);
4519 IEM_MC_LOCAL(uint64_t, u64Tmp);
4520 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
4521 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp);
4522 IEM_MC_ADVANCE_RIP();
4523 IEM_MC_END();
4524 return VINF_SUCCESS;
4525
4526 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4527 }
4528}
4529
4530
4531/**
4532 * @opcode 0xa2
4533 */
4534FNIEMOP_DEF(iemOp_mov_Ob_AL)
4535{
4536 /*
4537 * Get the offset and fend of lock prefixes.
4538 */
4539 RTGCPTR GCPtrMemOff;
4540 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
4541
4542 /*
4543 * Store AL.
4544 */
4545 IEM_MC_BEGIN(0,1);
4546 IEM_MC_LOCAL(uint8_t, u8Tmp);
4547 IEM_MC_FETCH_GREG_U8(u8Tmp, X86_GREG_xAX);
4548 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u8Tmp);
4549 IEM_MC_ADVANCE_RIP();
4550 IEM_MC_END();
4551 return VINF_SUCCESS;
4552}
4553
4554
4555/**
4556 * @opcode 0xa3
4557 */
4558FNIEMOP_DEF(iemOp_mov_Ov_rAX)
4559{
4560 /*
4561 * Get the offset and fend of lock prefixes.
4562 */
4563 RTGCPTR GCPtrMemOff;
4564 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
4565
4566 /*
4567 * Store rAX.
4568 */
4569 switch (pVCpu->iem.s.enmEffOpSize)
4570 {
4571 case IEMMODE_16BIT:
4572 IEM_MC_BEGIN(0,1);
4573 IEM_MC_LOCAL(uint16_t, u16Tmp);
4574 IEM_MC_FETCH_GREG_U16(u16Tmp, X86_GREG_xAX);
4575 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u16Tmp);
4576 IEM_MC_ADVANCE_RIP();
4577 IEM_MC_END();
4578 return VINF_SUCCESS;
4579
4580 case IEMMODE_32BIT:
4581 IEM_MC_BEGIN(0,1);
4582 IEM_MC_LOCAL(uint32_t, u32Tmp);
4583 IEM_MC_FETCH_GREG_U32(u32Tmp, X86_GREG_xAX);
4584 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u32Tmp);
4585 IEM_MC_ADVANCE_RIP();
4586 IEM_MC_END();
4587 return VINF_SUCCESS;
4588
4589 case IEMMODE_64BIT:
4590 IEM_MC_BEGIN(0,1);
4591 IEM_MC_LOCAL(uint64_t, u64Tmp);
4592 IEM_MC_FETCH_GREG_U64(u64Tmp, X86_GREG_xAX);
4593 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u64Tmp);
4594 IEM_MC_ADVANCE_RIP();
4595 IEM_MC_END();
4596 return VINF_SUCCESS;
4597
4598 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4599 }
4600}
4601
/** Macro used by iemOp_movsb_Xb_Yb and iemOp_movswd_Xv_Yv.
 *
 * Emits the microcode for one non-REP MOVS step: load from seg:[xSI], store to
 * ES:[xDI], then advance (DF=0) or retreat (DF=1) both index registers by the
 * operand size in bytes.  Addresses are zero-extended to 64-bit for the fetch
 * macros; register updates are done at the given address width. */
#define IEM_MOVS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(0, 2); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR,           uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP(); \
    IEM_MC_END();
4620
4621/**
4622 * @opcode 0xa4
4623 */
4624FNIEMOP_DEF(iemOp_movsb_Xb_Yb)
4625{
4626 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4627
4628 /*
4629 * Use the C implementation if a repeat prefix is encountered.
4630 */
4631 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
4632 {
4633 IEMOP_MNEMONIC(rep_movsb_Xb_Yb, "rep movsb Xb,Yb");
4634 switch (pVCpu->iem.s.enmEffAddrMode)
4635 {
4636 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr16, pVCpu->iem.s.iEffSeg);
4637 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr32, pVCpu->iem.s.iEffSeg);
4638 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr64, pVCpu->iem.s.iEffSeg);
4639 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4640 }
4641 }
4642 IEMOP_MNEMONIC(movsb_Xb_Yb, "movsb Xb,Yb");
4643
4644 /*
4645 * Sharing case implementation with movs[wdq] below.
4646 */
4647 switch (pVCpu->iem.s.enmEffAddrMode)
4648 {
4649 case IEMMODE_16BIT: IEM_MOVS_CASE(8, 16); break;
4650 case IEMMODE_32BIT: IEM_MOVS_CASE(8, 32); break;
4651 case IEMMODE_64BIT: IEM_MOVS_CASE(8, 64); break;
4652 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4653 }
4654 return VINF_SUCCESS;
4655}
4656
4657
4658/**
4659 * @opcode 0xa5
4660 */
4661FNIEMOP_DEF(iemOp_movswd_Xv_Yv)
4662{
4663 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4664
4665 /*
4666 * Use the C implementation if a repeat prefix is encountered.
4667 */
4668 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
4669 {
4670 IEMOP_MNEMONIC(rep_movs_Xv_Yv, "rep movs Xv,Yv");
4671 switch (pVCpu->iem.s.enmEffOpSize)
4672 {
4673 case IEMMODE_16BIT:
4674 switch (pVCpu->iem.s.enmEffAddrMode)
4675 {
4676 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr16, pVCpu->iem.s.iEffSeg);
4677 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr32, pVCpu->iem.s.iEffSeg);
4678 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr64, pVCpu->iem.s.iEffSeg);
4679 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4680 }
4681 break;
4682 case IEMMODE_32BIT:
4683 switch (pVCpu->iem.s.enmEffAddrMode)
4684 {
4685 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr16, pVCpu->iem.s.iEffSeg);
4686 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr32, pVCpu->iem.s.iEffSeg);
4687 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr64, pVCpu->iem.s.iEffSeg);
4688 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4689 }
4690 case IEMMODE_64BIT:
4691 switch (pVCpu->iem.s.enmEffAddrMode)
4692 {
4693 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6);
4694 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr32, pVCpu->iem.s.iEffSeg);
4695 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr64, pVCpu->iem.s.iEffSeg);
4696 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4697 }
4698 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4699 }
4700 }
4701 IEMOP_MNEMONIC(movs_Xv_Yv, "movs Xv,Yv");
4702
4703 /*
4704 * Annoying double switch here.
4705 * Using ugly macro for implementing the cases, sharing it with movsb.
4706 */
4707 switch (pVCpu->iem.s.enmEffOpSize)
4708 {
4709 case IEMMODE_16BIT:
4710 switch (pVCpu->iem.s.enmEffAddrMode)
4711 {
4712 case IEMMODE_16BIT: IEM_MOVS_CASE(16, 16); break;
4713 case IEMMODE_32BIT: IEM_MOVS_CASE(16, 32); break;
4714 case IEMMODE_64BIT: IEM_MOVS_CASE(16, 64); break;
4715 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4716 }
4717 break;
4718
4719 case IEMMODE_32BIT:
4720 switch (pVCpu->iem.s.enmEffAddrMode)
4721 {
4722 case IEMMODE_16BIT: IEM_MOVS_CASE(32, 16); break;
4723 case IEMMODE_32BIT: IEM_MOVS_CASE(32, 32); break;
4724 case IEMMODE_64BIT: IEM_MOVS_CASE(32, 64); break;
4725 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4726 }
4727 break;
4728
4729 case IEMMODE_64BIT:
4730 switch (pVCpu->iem.s.enmEffAddrMode)
4731 {
4732 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
4733 case IEMMODE_32BIT: IEM_MOVS_CASE(64, 32); break;
4734 case IEMMODE_64BIT: IEM_MOVS_CASE(64, 64); break;
4735 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4736 }
4737 break;
4738 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4739 }
4740 return VINF_SUCCESS;
4741}
4742
4743#undef IEM_MOVS_CASE
4744
/** Macro used by iemOp_cmpsb_Xb_Yb and iemOp_cmpswd_Xv_Yv.
 *
 * Emits the microcode for one non-REP CMPS step: load seg:[xSI] and ES:[xDI],
 * compare them via the CMP arithmetic helper (which only updates EFLAGS), then
 * advance or retreat both index registers per EFLAGS.DF. */
#define IEM_CMPS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(3, 3); \
    IEM_MC_ARG(uint##ValBits##_t *, puValue1, 0); \
    IEM_MC_ARG(uint##ValBits##_t,   uValue2,  1); \
    IEM_MC_ARG(uint32_t *,          pEFlags,  2); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue1); \
    IEM_MC_LOCAL(RTGCPTR,           uAddr); \
    \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue1, pVCpu->iem.s.iEffSeg, uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue2, X86_SREG_ES, uAddr); \
    IEM_MC_REF_LOCAL(puValue1, uValue1); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puValue1, uValue2, pEFlags); \
    \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP(); \
    IEM_MC_END(); \
4771
4772/**
4773 * @opcode 0xa6
4774 */
4775FNIEMOP_DEF(iemOp_cmpsb_Xb_Yb)
4776{
4777 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4778
4779 /*
4780 * Use the C implementation if a repeat prefix is encountered.
4781 */
4782 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
4783 {
4784 IEMOP_MNEMONIC(repz_cmps_Xb_Yb, "repz cmps Xb,Yb");
4785 switch (pVCpu->iem.s.enmEffAddrMode)
4786 {
4787 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
4788 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
4789 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
4790 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4791 }
4792 }
4793 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
4794 {
4795 IEMOP_MNEMONIC(repnz_cmps_Xb_Yb, "repnz cmps Xb,Yb");
4796 switch (pVCpu->iem.s.enmEffAddrMode)
4797 {
4798 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
4799 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
4800 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
4801 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4802 }
4803 }
4804 IEMOP_MNEMONIC(cmps_Xb_Yb, "cmps Xb,Yb");
4805
4806 /*
4807 * Sharing case implementation with cmps[wdq] below.
4808 */
4809 switch (pVCpu->iem.s.enmEffAddrMode)
4810 {
4811 case IEMMODE_16BIT: IEM_CMPS_CASE(8, 16); break;
4812 case IEMMODE_32BIT: IEM_CMPS_CASE(8, 32); break;
4813 case IEMMODE_64BIT: IEM_CMPS_CASE(8, 64); break;
4814 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4815 }
4816 return VINF_SUCCESS;
4817
4818}
4819
4820
4821/**
4822 * @opcode 0xa7
4823 */
4824FNIEMOP_DEF(iemOp_cmpswd_Xv_Yv)
4825{
4826 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4827
4828 /*
4829 * Use the C implementation if a repeat prefix is encountered.
4830 */
4831 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
4832 {
4833 IEMOP_MNEMONIC(repe_cmps_Xv_Yv, "repe cmps Xv,Yv");
4834 switch (pVCpu->iem.s.enmEffOpSize)
4835 {
4836 case IEMMODE_16BIT:
4837 switch (pVCpu->iem.s.enmEffAddrMode)
4838 {
4839 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
4840 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
4841 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
4842 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4843 }
4844 break;
4845 case IEMMODE_32BIT:
4846 switch (pVCpu->iem.s.enmEffAddrMode)
4847 {
4848 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
4849 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
4850 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
4851 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4852 }
4853 case IEMMODE_64BIT:
4854 switch (pVCpu->iem.s.enmEffAddrMode)
4855 {
4856 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_4);
4857 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
4858 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
4859 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4860 }
4861 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4862 }
4863 }
4864
4865 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
4866 {
4867 IEMOP_MNEMONIC(repne_cmps_Xv_Yv, "repne cmps Xv,Yv");
4868 switch (pVCpu->iem.s.enmEffOpSize)
4869 {
4870 case IEMMODE_16BIT:
4871 switch (pVCpu->iem.s.enmEffAddrMode)
4872 {
4873 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
4874 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
4875 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
4876 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4877 }
4878 break;
4879 case IEMMODE_32BIT:
4880 switch (pVCpu->iem.s.enmEffAddrMode)
4881 {
4882 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
4883 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
4884 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
4885 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4886 }
4887 case IEMMODE_64BIT:
4888 switch (pVCpu->iem.s.enmEffAddrMode)
4889 {
4890 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_2);
4891 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
4892 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
4893 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4894 }
4895 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4896 }
4897 }
4898
4899 IEMOP_MNEMONIC(cmps_Xv_Yv, "cmps Xv,Yv");
4900
4901 /*
4902 * Annoying double switch here.
4903 * Using ugly macro for implementing the cases, sharing it with cmpsb.
4904 */
4905 switch (pVCpu->iem.s.enmEffOpSize)
4906 {
4907 case IEMMODE_16BIT:
4908 switch (pVCpu->iem.s.enmEffAddrMode)
4909 {
4910 case IEMMODE_16BIT: IEM_CMPS_CASE(16, 16); break;
4911 case IEMMODE_32BIT: IEM_CMPS_CASE(16, 32); break;
4912 case IEMMODE_64BIT: IEM_CMPS_CASE(16, 64); break;
4913 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4914 }
4915 break;
4916
4917 case IEMMODE_32BIT:
4918 switch (pVCpu->iem.s.enmEffAddrMode)
4919 {
4920 case IEMMODE_16BIT: IEM_CMPS_CASE(32, 16); break;
4921 case IEMMODE_32BIT: IEM_CMPS_CASE(32, 32); break;
4922 case IEMMODE_64BIT: IEM_CMPS_CASE(32, 64); break;
4923 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4924 }
4925 break;
4926
4927 case IEMMODE_64BIT:
4928 switch (pVCpu->iem.s.enmEffAddrMode)
4929 {
4930 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
4931 case IEMMODE_32BIT: IEM_CMPS_CASE(64, 32); break;
4932 case IEMMODE_64BIT: IEM_CMPS_CASE(64, 64); break;
4933 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4934 }
4935 break;
4936 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4937 }
4938 return VINF_SUCCESS;
4939
4940}
4941
4942#undef IEM_CMPS_CASE
4943
4944/**
4945 * @opcode 0xa8
4946 */
4947FNIEMOP_DEF(iemOp_test_AL_Ib)
4948{
4949 IEMOP_MNEMONIC(test_al_Ib, "test al,Ib");
4950 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
4951 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_test);
4952}
4953
4954
4955/**
4956 * @opcode 0xa9
4957 */
4958FNIEMOP_DEF(iemOp_test_eAX_Iz)
4959{
4960 IEMOP_MNEMONIC(test_rAX_Iz, "test rAX,Iz");
4961 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
4962 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_test);
4963}
4964
4965
/** Macro used by iemOp_stosb_Yb_AL and iemOp_stoswd_Yv_eAX.
 *
 * Emits the microcode for one non-REP STOS step: store AL/AX/EAX/RAX to
 * ES:[xDI], then advance or retreat xDI per EFLAGS.DF.  Note: STOS always
 * targets ES and ignores segment override prefixes. */
#define IEM_STOS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(0, 2); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR,           uAddr); \
    IEM_MC_FETCH_GREG_U##ValBits(uValue,  X86_GREG_xAX); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr,  X86_GREG_xDI); \
    IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP(); \
    IEM_MC_END(); \
4981
4982/**
4983 * @opcode 0xaa
4984 */
4985FNIEMOP_DEF(iemOp_stosb_Yb_AL)
4986{
4987 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4988
4989 /*
4990 * Use the C implementation if a repeat prefix is encountered.
4991 */
4992 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
4993 {
4994 IEMOP_MNEMONIC(rep_stos_Yb_al, "rep stos Yb,al");
4995 switch (pVCpu->iem.s.enmEffAddrMode)
4996 {
4997 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m16);
4998 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m32);
4999 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m64);
5000 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5001 }
5002 }
5003 IEMOP_MNEMONIC(stos_Yb_al, "stos Yb,al");
5004
5005 /*
5006 * Sharing case implementation with stos[wdq] below.
5007 */
5008 switch (pVCpu->iem.s.enmEffAddrMode)
5009 {
5010 case IEMMODE_16BIT: IEM_STOS_CASE(8, 16); break;
5011 case IEMMODE_32BIT: IEM_STOS_CASE(8, 32); break;
5012 case IEMMODE_64BIT: IEM_STOS_CASE(8, 64); break;
5013 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5014 }
5015 return VINF_SUCCESS;
5016}
5017
5018
5019/**
5020 * @opcode 0xab
5021 */
5022FNIEMOP_DEF(iemOp_stoswd_Yv_eAX)
5023{
5024 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5025
5026 /*
5027 * Use the C implementation if a repeat prefix is encountered.
5028 */
5029 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
5030 {
5031 IEMOP_MNEMONIC(rep_stos_Yv_rAX, "rep stos Yv,rAX");
5032 switch (pVCpu->iem.s.enmEffOpSize)
5033 {
5034 case IEMMODE_16BIT:
5035 switch (pVCpu->iem.s.enmEffAddrMode)
5036 {
5037 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m16);
5038 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m32);
5039 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m64);
5040 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5041 }
5042 break;
5043 case IEMMODE_32BIT:
5044 switch (pVCpu->iem.s.enmEffAddrMode)
5045 {
5046 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m16);
5047 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m32);
5048 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m64);
5049 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5050 }
5051 case IEMMODE_64BIT:
5052 switch (pVCpu->iem.s.enmEffAddrMode)
5053 {
5054 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_9);
5055 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m32);
5056 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m64);
5057 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5058 }
5059 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5060 }
5061 }
5062 IEMOP_MNEMONIC(stos_Yv_rAX, "stos Yv,rAX");
5063
5064 /*
5065 * Annoying double switch here.
5066 * Using ugly macro for implementing the cases, sharing it with stosb.
5067 */
5068 switch (pVCpu->iem.s.enmEffOpSize)
5069 {
5070 case IEMMODE_16BIT:
5071 switch (pVCpu->iem.s.enmEffAddrMode)
5072 {
5073 case IEMMODE_16BIT: IEM_STOS_CASE(16, 16); break;
5074 case IEMMODE_32BIT: IEM_STOS_CASE(16, 32); break;
5075 case IEMMODE_64BIT: IEM_STOS_CASE(16, 64); break;
5076 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5077 }
5078 break;
5079
5080 case IEMMODE_32BIT:
5081 switch (pVCpu->iem.s.enmEffAddrMode)
5082 {
5083 case IEMMODE_16BIT: IEM_STOS_CASE(32, 16); break;
5084 case IEMMODE_32BIT: IEM_STOS_CASE(32, 32); break;
5085 case IEMMODE_64BIT: IEM_STOS_CASE(32, 64); break;
5086 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5087 }
5088 break;
5089
5090 case IEMMODE_64BIT:
5091 switch (pVCpu->iem.s.enmEffAddrMode)
5092 {
5093 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
5094 case IEMMODE_32BIT: IEM_STOS_CASE(64, 32); break;
5095 case IEMMODE_64BIT: IEM_STOS_CASE(64, 64); break;
5096 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5097 }
5098 break;
5099 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5100 }
5101 return VINF_SUCCESS;
5102}
5103
5104#undef IEM_STOS_CASE
5105
/** Macro used by iemOp_lodsb_AL_Xb and iemOp_lodswd_eAX_Xv.
 *
 * Emits the microcode for one non-REP LODS step: load AL/AX/EAX/RAX from
 * seg:[xSI] (honours segment overrides via iEffSeg), then advance or retreat
 * xSI per EFLAGS.DF. */
#define IEM_LODS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(0, 2); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR,           uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
    IEM_MC_STORE_GREG_U##ValBits(X86_GREG_xAX, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP(); \
    IEM_MC_END();
5121
5122/**
5123 * @opcode 0xac
5124 */
5125FNIEMOP_DEF(iemOp_lodsb_AL_Xb)
5126{
5127 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5128
5129 /*
5130 * Use the C implementation if a repeat prefix is encountered.
5131 */
5132 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
5133 {
5134 IEMOP_MNEMONIC(rep_lodsb_AL_Xb, "rep lodsb AL,Xb");
5135 switch (pVCpu->iem.s.enmEffAddrMode)
5136 {
5137 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m16, pVCpu->iem.s.iEffSeg);
5138 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m32, pVCpu->iem.s.iEffSeg);
5139 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m64, pVCpu->iem.s.iEffSeg);
5140 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5141 }
5142 }
5143 IEMOP_MNEMONIC(lodsb_AL_Xb, "lodsb AL,Xb");
5144
5145 /*
5146 * Sharing case implementation with stos[wdq] below.
5147 */
5148 switch (pVCpu->iem.s.enmEffAddrMode)
5149 {
5150 case IEMMODE_16BIT: IEM_LODS_CASE(8, 16); break;
5151 case IEMMODE_32BIT: IEM_LODS_CASE(8, 32); break;
5152 case IEMMODE_64BIT: IEM_LODS_CASE(8, 64); break;
5153 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5154 }
5155 return VINF_SUCCESS;
5156}
5157
5158
5159/**
5160 * @opcode 0xad
5161 */
5162FNIEMOP_DEF(iemOp_lodswd_eAX_Xv)
5163{
5164 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5165
5166 /*
5167 * Use the C implementation if a repeat prefix is encountered.
5168 */
5169 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
5170 {
5171 IEMOP_MNEMONIC(rep_lods_rAX_Xv, "rep lods rAX,Xv");
5172 switch (pVCpu->iem.s.enmEffOpSize)
5173 {
5174 case IEMMODE_16BIT:
5175 switch (pVCpu->iem.s.enmEffAddrMode)
5176 {
5177 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m16, pVCpu->iem.s.iEffSeg);
5178 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m32, pVCpu->iem.s.iEffSeg);
5179 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m64, pVCpu->iem.s.iEffSeg);
5180 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5181 }
5182 break;
5183 case IEMMODE_32BIT:
5184 switch (pVCpu->iem.s.enmEffAddrMode)
5185 {
5186 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m16, pVCpu->iem.s.iEffSeg);
5187 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m32, pVCpu->iem.s.iEffSeg);
5188 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m64, pVCpu->iem.s.iEffSeg);
5189 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5190 }
5191 case IEMMODE_64BIT:
5192 switch (pVCpu->iem.s.enmEffAddrMode)
5193 {
5194 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_7);
5195 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m32, pVCpu->iem.s.iEffSeg);
5196 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m64, pVCpu->iem.s.iEffSeg);
5197 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5198 }
5199 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5200 }
5201 }
5202 IEMOP_MNEMONIC(lods_rAX_Xv, "lods rAX,Xv");
5203
5204 /*
5205 * Annoying double switch here.
5206 * Using ugly macro for implementing the cases, sharing it with lodsb.
5207 */
5208 switch (pVCpu->iem.s.enmEffOpSize)
5209 {
5210 case IEMMODE_16BIT:
5211 switch (pVCpu->iem.s.enmEffAddrMode)
5212 {
5213 case IEMMODE_16BIT: IEM_LODS_CASE(16, 16); break;
5214 case IEMMODE_32BIT: IEM_LODS_CASE(16, 32); break;
5215 case IEMMODE_64BIT: IEM_LODS_CASE(16, 64); break;
5216 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5217 }
5218 break;
5219
5220 case IEMMODE_32BIT:
5221 switch (pVCpu->iem.s.enmEffAddrMode)
5222 {
5223 case IEMMODE_16BIT: IEM_LODS_CASE(32, 16); break;
5224 case IEMMODE_32BIT: IEM_LODS_CASE(32, 32); break;
5225 case IEMMODE_64BIT: IEM_LODS_CASE(32, 64); break;
5226 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5227 }
5228 break;
5229
5230 case IEMMODE_64BIT:
5231 switch (pVCpu->iem.s.enmEffAddrMode)
5232 {
5233 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
5234 case IEMMODE_32BIT: IEM_LODS_CASE(64, 32); break;
5235 case IEMMODE_64BIT: IEM_LODS_CASE(64, 64); break;
5236 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5237 }
5238 break;
5239 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5240 }
5241 return VINF_SUCCESS;
5242}
5243
5244#undef IEM_LODS_CASE
5245
/** Macro used by iemOp_scasb_AL_Xb and iemOp_scaswd_eAX_Xv.
 *
 * Emits the microcode for one non-REP SCAS step: compare AL/AX/EAX/RAX with
 * ES:[xDI] via the CMP arithmetic helper (EFLAGS only; rAX is not modified),
 * then advance or retreat xDI per EFLAGS.DF.  SCAS always uses ES. */
#define IEM_SCAS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(3, 2); \
    IEM_MC_ARG(uint##ValBits##_t *, puRax,   0); \
    IEM_MC_ARG(uint##ValBits##_t,   uValue,  1); \
    IEM_MC_ARG(uint32_t *,          pEFlags, 2); \
    IEM_MC_LOCAL(RTGCPTR,           uAddr); \
    \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, X86_SREG_ES, uAddr); \
    IEM_MC_REF_GREG_U##ValBits(puRax, X86_GREG_xAX); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puRax, uValue, pEFlags); \
    \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP(); \
    IEM_MC_END();
5267
5268/**
5269 * @opcode 0xae
5270 */
5271FNIEMOP_DEF(iemOp_scasb_AL_Xb)
5272{
5273 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5274
5275 /*
5276 * Use the C implementation if a repeat prefix is encountered.
5277 */
5278 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
5279 {
5280 IEMOP_MNEMONIC(repe_scasb_AL_Xb, "repe scasb AL,Xb");
5281 switch (pVCpu->iem.s.enmEffAddrMode)
5282 {
5283 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m16);
5284 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m32);
5285 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m64);
5286 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5287 }
5288 }
5289 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
5290 {
5291 IEMOP_MNEMONIC(repone_scasb_AL_Xb, "repne scasb AL,Xb");
5292 switch (pVCpu->iem.s.enmEffAddrMode)
5293 {
5294 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m16);
5295 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m32);
5296 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m64);
5297 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5298 }
5299 }
5300 IEMOP_MNEMONIC(scasb_AL_Xb, "scasb AL,Xb");
5301
5302 /*
5303 * Sharing case implementation with stos[wdq] below.
5304 */
5305 switch (pVCpu->iem.s.enmEffAddrMode)
5306 {
5307 case IEMMODE_16BIT: IEM_SCAS_CASE(8, 16); break;
5308 case IEMMODE_32BIT: IEM_SCAS_CASE(8, 32); break;
5309 case IEMMODE_64BIT: IEM_SCAS_CASE(8, 64); break;
5310 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5311 }
5312 return VINF_SUCCESS;
5313}
5314
5315
5316/**
5317 * @opcode 0xaf
5318 */
5319FNIEMOP_DEF(iemOp_scaswd_eAX_Xv)
5320{
5321 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5322
5323 /*
5324 * Use the C implementation if a repeat prefix is encountered.
5325 */
5326 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
5327 {
5328 IEMOP_MNEMONIC(repe_scas_rAX_Xv, "repe scas rAX,Xv");
5329 switch (pVCpu->iem.s.enmEffOpSize)
5330 {
5331 case IEMMODE_16BIT:
5332 switch (pVCpu->iem.s.enmEffAddrMode)
5333 {
5334 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m16);
5335 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m32);
5336 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m64);
5337 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5338 }
5339 break;
5340 case IEMMODE_32BIT:
5341 switch (pVCpu->iem.s.enmEffAddrMode)
5342 {
5343 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m16);
5344 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m32);
5345 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m64);
5346 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5347 }
5348 case IEMMODE_64BIT:
5349 switch (pVCpu->iem.s.enmEffAddrMode)
5350 {
5351 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /** @todo It's this wrong, we can do 16-bit addressing in 64-bit mode, but not 32-bit. right? */
5352 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m32);
5353 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m64);
5354 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5355 }
5356 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5357 }
5358 }
5359 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
5360 {
5361 IEMOP_MNEMONIC(repne_scas_rAX_Xv, "repne scas rAX,Xv");
5362 switch (pVCpu->iem.s.enmEffOpSize)
5363 {
5364 case IEMMODE_16BIT:
5365 switch (pVCpu->iem.s.enmEffAddrMode)
5366 {
5367 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m16);
5368 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m32);
5369 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m64);
5370 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5371 }
5372 break;
5373 case IEMMODE_32BIT:
5374 switch (pVCpu->iem.s.enmEffAddrMode)
5375 {
5376 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m16);
5377 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m32);
5378 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m64);
5379 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5380 }
5381 case IEMMODE_64BIT:
5382 switch (pVCpu->iem.s.enmEffAddrMode)
5383 {
5384 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_5);
5385 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m32);
5386 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m64);
5387 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5388 }
5389 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5390 }
5391 }
5392 IEMOP_MNEMONIC(scas_rAX_Xv, "scas rAX,Xv");
5393
5394 /*
5395 * Annoying double switch here.
5396 * Using ugly macro for implementing the cases, sharing it with scasb.
5397 */
5398 switch (pVCpu->iem.s.enmEffOpSize)
5399 {
5400 case IEMMODE_16BIT:
5401 switch (pVCpu->iem.s.enmEffAddrMode)
5402 {
5403 case IEMMODE_16BIT: IEM_SCAS_CASE(16, 16); break;
5404 case IEMMODE_32BIT: IEM_SCAS_CASE(16, 32); break;
5405 case IEMMODE_64BIT: IEM_SCAS_CASE(16, 64); break;
5406 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5407 }
5408 break;
5409
5410 case IEMMODE_32BIT:
5411 switch (pVCpu->iem.s.enmEffAddrMode)
5412 {
5413 case IEMMODE_16BIT: IEM_SCAS_CASE(32, 16); break;
5414 case IEMMODE_32BIT: IEM_SCAS_CASE(32, 32); break;
5415 case IEMMODE_64BIT: IEM_SCAS_CASE(32, 64); break;
5416 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5417 }
5418 break;
5419
5420 case IEMMODE_64BIT:
5421 switch (pVCpu->iem.s.enmEffAddrMode)
5422 {
5423 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
5424 case IEMMODE_32BIT: IEM_SCAS_CASE(64, 32); break;
5425 case IEMMODE_64BIT: IEM_SCAS_CASE(64, 64); break;
5426 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5427 }
5428 break;
5429 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5430 }
5431 return VINF_SUCCESS;
5432}
5433
5434#undef IEM_SCAS_CASE
5435
5436/**
5437 * Common 'mov r8, imm8' helper.
5438 */
5439FNIEMOP_DEF_1(iemOpCommonMov_r8_Ib, uint8_t, iReg)
5440{
5441 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
5442 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5443
5444 IEM_MC_BEGIN(0, 1);
5445 IEM_MC_LOCAL_CONST(uint8_t, u8Value,/*=*/ u8Imm);
5446 IEM_MC_STORE_GREG_U8(iReg, u8Value);
5447 IEM_MC_ADVANCE_RIP();
5448 IEM_MC_END();
5449
5450 return VINF_SUCCESS;
5451}
5452
5453
5454/**
5455 * @opcode 0xb0
5456 */
5457FNIEMOP_DEF(iemOp_mov_AL_Ib)
5458{
5459 IEMOP_MNEMONIC(mov_AL_Ib, "mov AL,Ib");
5460 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xAX | pVCpu->iem.s.uRexB);
5461}
5462
5463
5464/**
5465 * @opcode 0xb1
5466 */
5467FNIEMOP_DEF(iemOp_CL_Ib)
5468{
5469 IEMOP_MNEMONIC(mov_CL_Ib, "mov CL,Ib");
5470 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xCX | pVCpu->iem.s.uRexB);
5471}
5472
5473
5474/**
5475 * @opcode 0xb2
5476 */
5477FNIEMOP_DEF(iemOp_DL_Ib)
5478{
5479 IEMOP_MNEMONIC(mov_DL_Ib, "mov DL,Ib");
5480 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDX | pVCpu->iem.s.uRexB);
5481}
5482
5483
5484/**
5485 * @opcode 0xb3
5486 */
5487FNIEMOP_DEF(iemOp_BL_Ib)
5488{
5489 IEMOP_MNEMONIC(mov_BL_Ib, "mov BL,Ib");
5490 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBX | pVCpu->iem.s.uRexB);
5491}
5492
5493
5494/**
5495 * @opcode 0xb4
5496 */
5497FNIEMOP_DEF(iemOp_mov_AH_Ib)
5498{
5499 IEMOP_MNEMONIC(mov_AH_Ib, "mov AH,Ib");
5500 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP | pVCpu->iem.s.uRexB);
5501}
5502
5503
5504/**
5505 * @opcode 0xb5
5506 */
5507FNIEMOP_DEF(iemOp_CH_Ib)
5508{
5509 IEMOP_MNEMONIC(mov_CH_Ib, "mov CH,Ib");
5510 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBP | pVCpu->iem.s.uRexB);
5511}
5512
5513
5514/**
5515 * @opcode 0xb6
5516 */
5517FNIEMOP_DEF(iemOp_DH_Ib)
5518{
5519 IEMOP_MNEMONIC(mov_DH_Ib, "mov DH,Ib");
5520 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSI | pVCpu->iem.s.uRexB);
5521}
5522
5523
5524/**
5525 * @opcode 0xb7
5526 */
5527FNIEMOP_DEF(iemOp_BH_Ib)
5528{
5529 IEMOP_MNEMONIC(mov_BH_Ib, "mov BH,Ib");
5530 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDI | pVCpu->iem.s.uRexB);
5531}
5532
5533
5534/**
5535 * Common 'mov regX,immX' helper.
5536 */
5537FNIEMOP_DEF_1(iemOpCommonMov_Rv_Iv, uint8_t, iReg)
5538{
5539 switch (pVCpu->iem.s.enmEffOpSize)
5540 {
5541 case IEMMODE_16BIT:
5542 {
5543 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
5544 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5545
5546 IEM_MC_BEGIN(0, 1);
5547 IEM_MC_LOCAL_CONST(uint16_t, u16Value,/*=*/ u16Imm);
5548 IEM_MC_STORE_GREG_U16(iReg, u16Value);
5549 IEM_MC_ADVANCE_RIP();
5550 IEM_MC_END();
5551 break;
5552 }
5553
5554 case IEMMODE_32BIT:
5555 {
5556 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
5557 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5558
5559 IEM_MC_BEGIN(0, 1);
5560 IEM_MC_LOCAL_CONST(uint32_t, u32Value,/*=*/ u32Imm);
5561 IEM_MC_STORE_GREG_U32(iReg, u32Value);
5562 IEM_MC_ADVANCE_RIP();
5563 IEM_MC_END();
5564 break;
5565 }
5566 case IEMMODE_64BIT:
5567 {
5568 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm); /* 64-bit immediate! */
5569 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5570
5571 IEM_MC_BEGIN(0, 1);
5572 IEM_MC_LOCAL_CONST(uint64_t, u64Value,/*=*/ u64Imm);
5573 IEM_MC_STORE_GREG_U64(iReg, u64Value);
5574 IEM_MC_ADVANCE_RIP();
5575 IEM_MC_END();
5576 break;
5577 }
5578 }
5579
5580 return VINF_SUCCESS;
5581}
5582
5583
5584/**
5585 * @opcode 0xb8
5586 */
5587FNIEMOP_DEF(iemOp_eAX_Iv)
5588{
5589 IEMOP_MNEMONIC(mov_rAX_IV, "mov rAX,IV");
5590 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xAX | pVCpu->iem.s.uRexB);
5591}
5592
5593
5594/**
5595 * @opcode 0xb9
5596 */
5597FNIEMOP_DEF(iemOp_eCX_Iv)
5598{
5599 IEMOP_MNEMONIC(mov_rCX_IV, "mov rCX,IV");
5600 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xCX | pVCpu->iem.s.uRexB);
5601}
5602
5603
5604/**
5605 * @opcode 0xba
5606 */
5607FNIEMOP_DEF(iemOp_eDX_Iv)
5608{
5609 IEMOP_MNEMONIC(mov_rDX_IV, "mov rDX,IV");
5610 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDX | pVCpu->iem.s.uRexB);
5611}
5612
5613
5614/**
5615 * @opcode 0xbb
5616 */
5617FNIEMOP_DEF(iemOp_eBX_Iv)
5618{
5619 IEMOP_MNEMONIC(mov_rBX_IV, "mov rBX,IV");
5620 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBX | pVCpu->iem.s.uRexB);
5621}
5622
5623
5624/**
5625 * @opcode 0xbc
5626 */
5627FNIEMOP_DEF(iemOp_eSP_Iv)
5628{
5629 IEMOP_MNEMONIC(mov_rSP_IV, "mov rSP,IV");
5630 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSP | pVCpu->iem.s.uRexB);
5631}
5632
5633
5634/**
5635 * @opcode 0xbd
5636 */
5637FNIEMOP_DEF(iemOp_eBP_Iv)
5638{
5639 IEMOP_MNEMONIC(mov_rBP_IV, "mov rBP,IV");
5640 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBP | pVCpu->iem.s.uRexB);
5641}
5642
5643
5644/**
5645 * @opcode 0xbe
5646 */
5647FNIEMOP_DEF(iemOp_eSI_Iv)
5648{
5649 IEMOP_MNEMONIC(mov_rSI_IV, "mov rSI,IV");
5650 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSI | pVCpu->iem.s.uRexB);
5651}
5652
5653
5654/**
5655 * @opcode 0xbf
5656 */
5657FNIEMOP_DEF(iemOp_eDI_Iv)
5658{
5659 IEMOP_MNEMONIC(mov_rDI_IV, "mov rDI,IV");
5660 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDI | pVCpu->iem.s.uRexB);
5661}
5662
5663
5664/**
5665 * @opcode 0xc0
5666 */
5667FNIEMOP_DEF(iemOp_Grp2_Eb_Ib)
5668{
5669 IEMOP_HLP_MIN_186();
5670 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5671 PCIEMOPSHIFTSIZES pImpl;
5672 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
5673 {
5674 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Eb_Ib, "rol Eb,Ib"); break;
5675 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Eb_Ib, "ror Eb,Ib"); break;
5676 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Eb_Ib, "rcl Eb,Ib"); break;
5677 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Eb_Ib, "rcr Eb,Ib"); break;
5678 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Eb_Ib, "shl Eb,Ib"); break;
5679 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Eb_Ib, "shr Eb,Ib"); break;
5680 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Eb_Ib, "sar Eb,Ib"); break;
5681 case 6: return IEMOP_RAISE_INVALID_OPCODE();
5682 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
5683 }
5684 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
5685
5686 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5687 {
5688 /* register */
5689 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5690 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5691 IEM_MC_BEGIN(3, 0);
5692 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
5693 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
5694 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5695 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5696 IEM_MC_REF_EFLAGS(pEFlags);
5697 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
5698 IEM_MC_ADVANCE_RIP();
5699 IEM_MC_END();
5700 }
5701 else
5702 {
5703 /* memory */
5704 IEM_MC_BEGIN(3, 2);
5705 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
5706 IEM_MC_ARG(uint8_t, cShiftArg, 1);
5707 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
5708 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5709
5710 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5711 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5712 IEM_MC_ASSIGN(cShiftArg, cShift);
5713 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5714 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
5715 IEM_MC_FETCH_EFLAGS(EFlags);
5716 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
5717
5718 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
5719 IEM_MC_COMMIT_EFLAGS(EFlags);
5720 IEM_MC_ADVANCE_RIP();
5721 IEM_MC_END();
5722 }
5723 return VINF_SUCCESS;
5724}
5725
5726
5727/**
5728 * @opcode 0xc1
5729 */
5730FNIEMOP_DEF(iemOp_Grp2_Ev_Ib)
5731{
5732 IEMOP_HLP_MIN_186();
5733 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5734 PCIEMOPSHIFTSIZES pImpl;
5735 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
5736 {
5737 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Ev_Ib, "rol Ev,Ib"); break;
5738 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Ev_Ib, "ror Ev,Ib"); break;
5739 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Ev_Ib, "rcl Ev,Ib"); break;
5740 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Ev_Ib, "rcr Ev,Ib"); break;
5741 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Ev_Ib, "shl Ev,Ib"); break;
5742 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Ev_Ib, "shr Ev,Ib"); break;
5743 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Ev_Ib, "sar Ev,Ib"); break;
5744 case 6: return IEMOP_RAISE_INVALID_OPCODE();
5745 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
5746 }
5747 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
5748
5749 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5750 {
5751 /* register */
5752 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5753 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5754 switch (pVCpu->iem.s.enmEffOpSize)
5755 {
5756 case IEMMODE_16BIT:
5757 IEM_MC_BEGIN(3, 0);
5758 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5759 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
5760 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5761 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5762 IEM_MC_REF_EFLAGS(pEFlags);
5763 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
5764 IEM_MC_ADVANCE_RIP();
5765 IEM_MC_END();
5766 return VINF_SUCCESS;
5767
5768 case IEMMODE_32BIT:
5769 IEM_MC_BEGIN(3, 0);
5770 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5771 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
5772 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5773 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5774 IEM_MC_REF_EFLAGS(pEFlags);
5775 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
5776 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5777 IEM_MC_ADVANCE_RIP();
5778 IEM_MC_END();
5779 return VINF_SUCCESS;
5780
5781 case IEMMODE_64BIT:
5782 IEM_MC_BEGIN(3, 0);
5783 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5784 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
5785 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5786 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5787 IEM_MC_REF_EFLAGS(pEFlags);
5788 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
5789 IEM_MC_ADVANCE_RIP();
5790 IEM_MC_END();
5791 return VINF_SUCCESS;
5792
5793 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5794 }
5795 }
5796 else
5797 {
5798 /* memory */
5799 switch (pVCpu->iem.s.enmEffOpSize)
5800 {
5801 case IEMMODE_16BIT:
5802 IEM_MC_BEGIN(3, 2);
5803 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5804 IEM_MC_ARG(uint8_t, cShiftArg, 1);
5805 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
5806 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5807
5808 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5809 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5810 IEM_MC_ASSIGN(cShiftArg, cShift);
5811 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5812 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
5813 IEM_MC_FETCH_EFLAGS(EFlags);
5814 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
5815
5816 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
5817 IEM_MC_COMMIT_EFLAGS(EFlags);
5818 IEM_MC_ADVANCE_RIP();
5819 IEM_MC_END();
5820 return VINF_SUCCESS;
5821
5822 case IEMMODE_32BIT:
5823 IEM_MC_BEGIN(3, 2);
5824 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5825 IEM_MC_ARG(uint8_t, cShiftArg, 1);
5826 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
5827 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5828
5829 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5830 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5831 IEM_MC_ASSIGN(cShiftArg, cShift);
5832 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5833 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
5834 IEM_MC_FETCH_EFLAGS(EFlags);
5835 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
5836
5837 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
5838 IEM_MC_COMMIT_EFLAGS(EFlags);
5839 IEM_MC_ADVANCE_RIP();
5840 IEM_MC_END();
5841 return VINF_SUCCESS;
5842
5843 case IEMMODE_64BIT:
5844 IEM_MC_BEGIN(3, 2);
5845 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5846 IEM_MC_ARG(uint8_t, cShiftArg, 1);
5847 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
5848 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5849
5850 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5851 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5852 IEM_MC_ASSIGN(cShiftArg, cShift);
5853 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5854 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
5855 IEM_MC_FETCH_EFLAGS(EFlags);
5856 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
5857
5858 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
5859 IEM_MC_COMMIT_EFLAGS(EFlags);
5860 IEM_MC_ADVANCE_RIP();
5861 IEM_MC_END();
5862 return VINF_SUCCESS;
5863
5864 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5865 }
5866 }
5867}
5868
5869
5870/**
5871 * @opcode 0xc2
5872 */
5873FNIEMOP_DEF(iemOp_retn_Iw)
5874{
5875 IEMOP_MNEMONIC(retn_Iw, "retn Iw");
5876 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
5877 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5878 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5879 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pVCpu->iem.s.enmEffOpSize, u16Imm);
5880}
5881
5882
5883/**
5884 * @opcode 0xc3
5885 */
5886FNIEMOP_DEF(iemOp_retn)
5887{
5888 IEMOP_MNEMONIC(retn, "retn");
5889 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5890 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5891 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pVCpu->iem.s.enmEffOpSize, 0);
5892}
5893
5894
5895/**
5896 * @opcode 0xc4
5897 */
5898FNIEMOP_DEF(iemOp_les_Gv_Mp__vex2)
5899{
5900 /* The LES instruction is invalid 64-bit mode. In legacy and
5901 compatability mode it is invalid with MOD=3.
5902 The use as a VEX prefix is made possible by assigning the inverted
5903 REX.R to the top MOD bit, and the top bit in the inverted register
5904 specifier to the bottom MOD bit, thereby effectively limiting 32-bit
5905 to accessing registers 0..7 in this VEX form. */
5906 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5907 if ( pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
5908 || (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5909 {
5910 IEMOP_MNEMONIC(vex2_prefix, "vex2");
5911 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx)
5912 {
5913 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
5914 if ( ( pVCpu->iem.s.fPrefixes
5915 & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_LOCK | IEM_OP_PRF_REX))
5916 == 0)
5917 {
5918 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
5919 pVCpu->iem.s.uRexReg = ~bRm >> (7 - 3);
5920 pVCpu->iem.s.uVex3rdReg = (~bRm >> 3) & 0xf;
5921 pVCpu->iem.s.uVexLength = (bRm >> 2) & 1;
5922 pVCpu->iem.s.idxPrefix = bRm & 0x3;
5923
5924 return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
5925 }
5926
5927 Log(("VEX2: Invalid prefix mix!\n"));
5928 }
5929 else
5930 Log(("VEX2: AVX support disabled!\n"));
5931
5932 /* @todo does intel completely decode the sequence with SIB/disp before \#UD? */
5933 return IEMOP_RAISE_INVALID_OPCODE();
5934 }
5935 IEMOP_MNEMONIC(les_Gv_Mp, "les Gv,Mp");
5936 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_ES, bRm);
5937}
5938
5939
5940/**
5941 * @opcode 0xc5
5942 */
5943FNIEMOP_DEF(iemOp_lds_Gv_Mp__vex3)
5944{
5945 /* The LDS instruction is invalid 64-bit mode. In legacy and
5946 compatability mode it is invalid with MOD=3.
5947 The use as a VEX prefix is made possible by assigning the inverted
5948 REX.R and REX.X to the two MOD bits, since the REX bits are ignored
5949 outside of 64-bit mode. VEX is not available in real or v86 mode. */
5950 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5951 if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
5952 {
5953 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
5954 {
5955 IEMOP_MNEMONIC(lds_Gv_Mp, "lds Gv,Mp");
5956 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_DS, bRm);
5957 }
5958 IEMOP_HLP_NO_REAL_OR_V86_MODE();
5959 }
5960
5961 IEMOP_MNEMONIC(vex3_prefix, "vex3");
5962 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx)
5963 {
5964 /** @todo Test when exctly the VEX conformance checks kick in during
5965 * instruction decoding and fetching (using \#PF). */
5966 uint8_t bVex2; IEM_OPCODE_GET_NEXT_U8(&bVex2);
5967 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
5968 if ( ( pVCpu->iem.s.fPrefixes
5969 & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_LOCK | IEM_OP_PRF_REX))
5970 == 0)
5971 {
5972 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
5973 if (bVex2 & 0x80 /* VEX.W */)
5974 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
5975 pVCpu->iem.s.uRexReg = ~bRm >> (7 - 3);
5976 pVCpu->iem.s.uRexIndex = ~bRm >> (6 - 3);
5977 pVCpu->iem.s.uRexB = ~bRm >> (5 - 3);
5978 pVCpu->iem.s.uVex3rdReg = (~bVex2 >> 3) & 0xf;
5979 pVCpu->iem.s.uVexLength = (bVex2 >> 2) & 1;
5980 pVCpu->iem.s.idxPrefix = bVex2 & 0x3;
5981
5982 switch (bRm & 0x1f)
5983 {
5984 case 1: /* 0x0f lead opcode byte. */
5985 return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
5986
5987 case 2: /* 0x0f 0x38 lead opcode bytes. */
5988 /** @todo VEX: Just use new tables and decoders. */
5989 IEMOP_BITCH_ABOUT_STUB();
5990 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
5991
5992 case 3: /* 0x0f 0x3a lead opcode bytes. */
5993 /** @todo VEX: Just use new tables and decoders. */
5994 IEMOP_BITCH_ABOUT_STUB();
5995 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
5996
5997 default:
5998 Log(("VEX3: Invalid vvvv value: %#x!\n", bRm & 0x1f));
5999 return IEMOP_RAISE_INVALID_OPCODE();
6000 }
6001 }
6002 else
6003 Log(("VEX3: Invalid prefix mix!\n"));
6004 }
6005 else
6006 Log(("VEX3: AVX support disabled!\n"));
6007 return IEMOP_RAISE_INVALID_OPCODE();
6008}
6009
6010
6011/**
6012 * @opcode 0xc6
6013 */
6014FNIEMOP_DEF(iemOp_Grp11_Eb_Ib)
6015{
6016 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6017 if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
6018 return IEMOP_RAISE_INVALID_OPCODE();
6019 IEMOP_MNEMONIC(mov_Eb_Ib, "mov Eb,Ib");
6020
6021 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6022 {
6023 /* register access */
6024 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
6025 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6026 IEM_MC_BEGIN(0, 0);
6027 IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u8Imm);
6028 IEM_MC_ADVANCE_RIP();
6029 IEM_MC_END();
6030 }
6031 else
6032 {
6033 /* memory access. */
6034 IEM_MC_BEGIN(0, 1);
6035 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6036 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6037 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
6038 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6039 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Imm);
6040 IEM_MC_ADVANCE_RIP();
6041 IEM_MC_END();
6042 }
6043 return VINF_SUCCESS;
6044}
6045
6046
6047/**
6048 * @opcode 0xc7
6049 */
6050FNIEMOP_DEF(iemOp_Grp11_Ev_Iz)
6051{
6052 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6053 if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
6054 return IEMOP_RAISE_INVALID_OPCODE();
6055 IEMOP_MNEMONIC(mov_Ev_Iz, "mov Ev,Iz");
6056
6057 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6058 {
6059 /* register access */
6060 switch (pVCpu->iem.s.enmEffOpSize)
6061 {
6062 case IEMMODE_16BIT:
6063 IEM_MC_BEGIN(0, 0);
6064 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
6065 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6066 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Imm);
6067 IEM_MC_ADVANCE_RIP();
6068 IEM_MC_END();
6069 return VINF_SUCCESS;
6070
6071 case IEMMODE_32BIT:
6072 IEM_MC_BEGIN(0, 0);
6073 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
6074 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6075 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Imm);
6076 IEM_MC_ADVANCE_RIP();
6077 IEM_MC_END();
6078 return VINF_SUCCESS;
6079
6080 case IEMMODE_64BIT:
6081 IEM_MC_BEGIN(0, 0);
6082 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
6083 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6084 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Imm);
6085 IEM_MC_ADVANCE_RIP();
6086 IEM_MC_END();
6087 return VINF_SUCCESS;
6088
6089 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6090 }
6091 }
6092 else
6093 {
6094 /* memory access. */
6095 switch (pVCpu->iem.s.enmEffOpSize)
6096 {
6097 case IEMMODE_16BIT:
6098 IEM_MC_BEGIN(0, 1);
6099 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6100 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
6101 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
6102 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6103 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Imm);
6104 IEM_MC_ADVANCE_RIP();
6105 IEM_MC_END();
6106 return VINF_SUCCESS;
6107
6108 case IEMMODE_32BIT:
6109 IEM_MC_BEGIN(0, 1);
6110 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6111 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
6112 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
6113 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6114 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Imm);
6115 IEM_MC_ADVANCE_RIP();
6116 IEM_MC_END();
6117 return VINF_SUCCESS;
6118
6119 case IEMMODE_64BIT:
6120 IEM_MC_BEGIN(0, 1);
6121 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6122 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
6123 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
6124 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6125 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Imm);
6126 IEM_MC_ADVANCE_RIP();
6127 IEM_MC_END();
6128 return VINF_SUCCESS;
6129
6130 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6131 }
6132 }
6133}
6134
6135
6136
6137
6138/**
6139 * @opcode 0xc8
6140 */
6141FNIEMOP_DEF(iemOp_enter_Iw_Ib)
6142{
6143 IEMOP_MNEMONIC(enter_Iw_Ib, "enter Iw,Ib");
6144 IEMOP_HLP_MIN_186();
6145 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6146 uint16_t cbFrame; IEM_OPCODE_GET_NEXT_U16(&cbFrame);
6147 uint8_t u8NestingLevel; IEM_OPCODE_GET_NEXT_U8(&u8NestingLevel);
6148 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6149 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_enter, pVCpu->iem.s.enmEffOpSize, cbFrame, u8NestingLevel);
6150}
6151
6152
6153/**
6154 * @opcode 0xc9
6155 */
6156FNIEMOP_DEF(iemOp_leave)
6157{
6158 IEMOP_MNEMONIC(leave, "leave");
6159 IEMOP_HLP_MIN_186();
6160 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6161 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6162 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_leave, pVCpu->iem.s.enmEffOpSize);
6163}
6164
6165
6166/**
6167 * @opcode 0xca
6168 */
6169FNIEMOP_DEF(iemOp_retf_Iw)
6170{
6171 IEMOP_MNEMONIC(retf_Iw, "retf Iw");
6172 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
6173 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6174 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6175 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, u16Imm);
6176}
6177
6178
6179/**
6180 * @opcode 0xcb
6181 */
6182FNIEMOP_DEF(iemOp_retf)
6183{
6184 IEMOP_MNEMONIC(retf, "retf");
6185 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6186 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6187 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, 0);
6188}
6189
6190
6191/**
6192 * @opcode 0xcc
6193 */
6194FNIEMOP_DEF(iemOp_int3)
6195{
6196 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6197 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_BP, true /*fIsBpInstr*/);
6198}
6199
6200
6201/**
6202 * @opcode 0xcd
6203 */
6204FNIEMOP_DEF(iemOp_int_Ib)
6205{
6206 uint8_t u8Int; IEM_OPCODE_GET_NEXT_U8(&u8Int);
6207 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6208 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, u8Int, false /*fIsBpInstr*/);
6209}
6210
6211
6212/**
6213 * @opcode 0xce
6214 */
6215FNIEMOP_DEF(iemOp_into)
6216{
6217 IEMOP_MNEMONIC(into, "into");
6218 IEMOP_HLP_NO_64BIT();
6219
6220 IEM_MC_BEGIN(2, 0);
6221 IEM_MC_ARG_CONST(uint8_t, u8Int, /*=*/ X86_XCPT_OF, 0);
6222 IEM_MC_ARG_CONST(bool, fIsBpInstr, /*=*/ false, 1);
6223 IEM_MC_CALL_CIMPL_2(iemCImpl_int, u8Int, fIsBpInstr);
6224 IEM_MC_END();
6225 return VINF_SUCCESS;
6226}
6227
6228
6229/**
6230 * @opcode 0xcf
6231 */
6232FNIEMOP_DEF(iemOp_iret)
6233{
6234 IEMOP_MNEMONIC(iret, "iret");
6235 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6236 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_iret, pVCpu->iem.s.enmEffOpSize);
6237}
6238
6239
6240/**
6241 * @opcode 0xd0
6242 */
6243FNIEMOP_DEF(iemOp_Grp2_Eb_1)
6244{
6245 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6246 PCIEMOPSHIFTSIZES pImpl;
6247 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
6248 {
6249 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Eb_1, "rol Eb,1"); break;
6250 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Eb_1, "ror Eb,1"); break;
6251 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Eb_1, "rcl Eb,1"); break;
6252 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Eb_1, "rcr Eb,1"); break;
6253 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Eb_1, "shl Eb,1"); break;
6254 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Eb_1, "shr Eb,1"); break;
6255 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Eb_1, "sar Eb,1"); break;
6256 case 6: return IEMOP_RAISE_INVALID_OPCODE();
6257 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
6258 }
6259 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
6260
6261 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6262 {
6263 /* register */
6264 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6265 IEM_MC_BEGIN(3, 0);
6266 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6267 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1);
6268 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6269 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6270 IEM_MC_REF_EFLAGS(pEFlags);
6271 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
6272 IEM_MC_ADVANCE_RIP();
6273 IEM_MC_END();
6274 }
6275 else
6276 {
6277 /* memory */
6278 IEM_MC_BEGIN(3, 2);
6279 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6280 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1);
6281 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6282 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6283
6284 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6285 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6286 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6287 IEM_MC_FETCH_EFLAGS(EFlags);
6288 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
6289
6290 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
6291 IEM_MC_COMMIT_EFLAGS(EFlags);
6292 IEM_MC_ADVANCE_RIP();
6293 IEM_MC_END();
6294 }
6295 return VINF_SUCCESS;
6296}
6297
6298
6299
6300/**
6301 * @opcode 0xd1
6302 */
6303FNIEMOP_DEF(iemOp_Grp2_Ev_1)
6304{
6305 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6306 PCIEMOPSHIFTSIZES pImpl;
6307 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
6308 {
6309 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Ev_1, "rol Ev,1"); break;
6310 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Ev_1, "ror Ev,1"); break;
6311 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Ev_1, "rcl Ev,1"); break;
6312 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Ev_1, "rcr Ev,1"); break;
6313 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Ev_1, "shl Ev,1"); break;
6314 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Ev_1, "shr Ev,1"); break;
6315 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Ev_1, "sar Ev,1"); break;
6316 case 6: return IEMOP_RAISE_INVALID_OPCODE();
6317 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
6318 }
6319 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
6320
6321 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6322 {
6323 /* register */
6324 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6325 switch (pVCpu->iem.s.enmEffOpSize)
6326 {
6327 case IEMMODE_16BIT:
6328 IEM_MC_BEGIN(3, 0);
6329 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6330 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
6331 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6332 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6333 IEM_MC_REF_EFLAGS(pEFlags);
6334 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
6335 IEM_MC_ADVANCE_RIP();
6336 IEM_MC_END();
6337 return VINF_SUCCESS;
6338
6339 case IEMMODE_32BIT:
6340 IEM_MC_BEGIN(3, 0);
6341 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6342 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
6343 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6344 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6345 IEM_MC_REF_EFLAGS(pEFlags);
6346 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
6347 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6348 IEM_MC_ADVANCE_RIP();
6349 IEM_MC_END();
6350 return VINF_SUCCESS;
6351
6352 case IEMMODE_64BIT:
6353 IEM_MC_BEGIN(3, 0);
6354 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6355 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
6356 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6357 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6358 IEM_MC_REF_EFLAGS(pEFlags);
6359 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
6360 IEM_MC_ADVANCE_RIP();
6361 IEM_MC_END();
6362 return VINF_SUCCESS;
6363
6364 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6365 }
6366 }
6367 else
6368 {
6369 /* memory */
6370 switch (pVCpu->iem.s.enmEffOpSize)
6371 {
6372 case IEMMODE_16BIT:
6373 IEM_MC_BEGIN(3, 2);
6374 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6375 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
6376 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6377 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6378
6379 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6380 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6381 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6382 IEM_MC_FETCH_EFLAGS(EFlags);
6383 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
6384
6385 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6386 IEM_MC_COMMIT_EFLAGS(EFlags);
6387 IEM_MC_ADVANCE_RIP();
6388 IEM_MC_END();
6389 return VINF_SUCCESS;
6390
6391 case IEMMODE_32BIT:
6392 IEM_MC_BEGIN(3, 2);
6393 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6394 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
6395 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6396 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6397
6398 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6399 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6400 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6401 IEM_MC_FETCH_EFLAGS(EFlags);
6402 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
6403
6404 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6405 IEM_MC_COMMIT_EFLAGS(EFlags);
6406 IEM_MC_ADVANCE_RIP();
6407 IEM_MC_END();
6408 return VINF_SUCCESS;
6409
6410 case IEMMODE_64BIT:
6411 IEM_MC_BEGIN(3, 2);
6412 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6413 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
6414 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6415 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6416
6417 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6418 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6419 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6420 IEM_MC_FETCH_EFLAGS(EFlags);
6421 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
6422
6423 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6424 IEM_MC_COMMIT_EFLAGS(EFlags);
6425 IEM_MC_ADVANCE_RIP();
6426 IEM_MC_END();
6427 return VINF_SUCCESS;
6428
6429 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6430 }
6431 }
6432}
6433
6434
6435/**
6436 * @opcode 0xd2
6437 */
6438FNIEMOP_DEF(iemOp_Grp2_Eb_CL)
6439{
6440 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6441 PCIEMOPSHIFTSIZES pImpl;
6442 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
6443 {
6444 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Eb_CL, "rol Eb,CL"); break;
6445 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Eb_CL, "ror Eb,CL"); break;
6446 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Eb_CL, "rcl Eb,CL"); break;
6447 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Eb_CL, "rcr Eb,CL"); break;
6448 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Eb_CL, "shl Eb,CL"); break;
6449 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Eb_CL, "shr Eb,CL"); break;
6450 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Eb_CL, "sar Eb,CL"); break;
6451 case 6: return IEMOP_RAISE_INVALID_OPCODE();
6452 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc, grr. */
6453 }
6454 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
6455
6456 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6457 {
6458 /* register */
6459 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6460 IEM_MC_BEGIN(3, 0);
6461 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6462 IEM_MC_ARG(uint8_t, cShiftArg, 1);
6463 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6464 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6465 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6466 IEM_MC_REF_EFLAGS(pEFlags);
6467 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
6468 IEM_MC_ADVANCE_RIP();
6469 IEM_MC_END();
6470 }
6471 else
6472 {
6473 /* memory */
6474 IEM_MC_BEGIN(3, 2);
6475 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6476 IEM_MC_ARG(uint8_t, cShiftArg, 1);
6477 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6478 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6479
6480 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6481 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6482 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6483 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6484 IEM_MC_FETCH_EFLAGS(EFlags);
6485 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
6486
6487 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
6488 IEM_MC_COMMIT_EFLAGS(EFlags);
6489 IEM_MC_ADVANCE_RIP();
6490 IEM_MC_END();
6491 }
6492 return VINF_SUCCESS;
6493}
6494
6495
6496/**
6497 * @opcode 0xd3
6498 */
6499FNIEMOP_DEF(iemOp_Grp2_Ev_CL)
6500{
6501 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6502 PCIEMOPSHIFTSIZES pImpl;
6503 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
6504 {
6505 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Ev_CL, "rol Ev,CL"); break;
6506 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Ev_CL, "ror Ev,CL"); break;
6507 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Ev_CL, "rcl Ev,CL"); break;
6508 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Ev_CL, "rcr Ev,CL"); break;
6509 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Ev_CL, "shl Ev,CL"); break;
6510 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Ev_CL, "shr Ev,CL"); break;
6511 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Ev_CL, "sar Ev,CL"); break;
6512 case 6: return IEMOP_RAISE_INVALID_OPCODE();
6513 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
6514 }
6515 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
6516
6517 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6518 {
6519 /* register */
6520 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6521 switch (pVCpu->iem.s.enmEffOpSize)
6522 {
6523 case IEMMODE_16BIT:
6524 IEM_MC_BEGIN(3, 0);
6525 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6526 IEM_MC_ARG(uint8_t, cShiftArg, 1);
6527 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6528 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6529 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6530 IEM_MC_REF_EFLAGS(pEFlags);
6531 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
6532 IEM_MC_ADVANCE_RIP();
6533 IEM_MC_END();
6534 return VINF_SUCCESS;
6535
6536 case IEMMODE_32BIT:
6537 IEM_MC_BEGIN(3, 0);
6538 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6539 IEM_MC_ARG(uint8_t, cShiftArg, 1);
6540 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6541 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6542 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6543 IEM_MC_REF_EFLAGS(pEFlags);
6544 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
6545 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6546 IEM_MC_ADVANCE_RIP();
6547 IEM_MC_END();
6548 return VINF_SUCCESS;
6549
6550 case IEMMODE_64BIT:
6551 IEM_MC_BEGIN(3, 0);
6552 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6553 IEM_MC_ARG(uint8_t, cShiftArg, 1);
6554 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6555 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6556 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6557 IEM_MC_REF_EFLAGS(pEFlags);
6558 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
6559 IEM_MC_ADVANCE_RIP();
6560 IEM_MC_END();
6561 return VINF_SUCCESS;
6562
6563 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6564 }
6565 }
6566 else
6567 {
6568 /* memory */
6569 switch (pVCpu->iem.s.enmEffOpSize)
6570 {
6571 case IEMMODE_16BIT:
6572 IEM_MC_BEGIN(3, 2);
6573 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6574 IEM_MC_ARG(uint8_t, cShiftArg, 1);
6575 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6576 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6577
6578 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6579 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6580 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6581 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6582 IEM_MC_FETCH_EFLAGS(EFlags);
6583 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
6584
6585 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6586 IEM_MC_COMMIT_EFLAGS(EFlags);
6587 IEM_MC_ADVANCE_RIP();
6588 IEM_MC_END();
6589 return VINF_SUCCESS;
6590
6591 case IEMMODE_32BIT:
6592 IEM_MC_BEGIN(3, 2);
6593 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6594 IEM_MC_ARG(uint8_t, cShiftArg, 1);
6595 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6596 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6597
6598 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6599 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6600 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6601 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6602 IEM_MC_FETCH_EFLAGS(EFlags);
6603 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
6604
6605 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6606 IEM_MC_COMMIT_EFLAGS(EFlags);
6607 IEM_MC_ADVANCE_RIP();
6608 IEM_MC_END();
6609 return VINF_SUCCESS;
6610
6611 case IEMMODE_64BIT:
6612 IEM_MC_BEGIN(3, 2);
6613 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6614 IEM_MC_ARG(uint8_t, cShiftArg, 1);
6615 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6616 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6617
6618 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6619 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6620 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6621 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6622 IEM_MC_FETCH_EFLAGS(EFlags);
6623 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
6624
6625 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6626 IEM_MC_COMMIT_EFLAGS(EFlags);
6627 IEM_MC_ADVANCE_RIP();
6628 IEM_MC_END();
6629 return VINF_SUCCESS;
6630
6631 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6632 }
6633 }
6634}
6635
6636/**
6637 * @opcode 0xd4
6638 */
6639FNIEMOP_DEF(iemOp_aam_Ib)
6640{
6641 IEMOP_MNEMONIC(aam_Ib, "aam Ib");
6642 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6643 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6644 IEMOP_HLP_NO_64BIT();
6645 if (!bImm)
6646 return IEMOP_RAISE_DIVIDE_ERROR();
6647 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aam, bImm);
6648}
6649
6650
6651/**
6652 * @opcode 0xd5
6653 */
6654FNIEMOP_DEF(iemOp_aad_Ib)
6655{
6656 IEMOP_MNEMONIC(aad_Ib, "aad Ib");
6657 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6658 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6659 IEMOP_HLP_NO_64BIT();
6660 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aad, bImm);
6661}
6662
6663
6664/**
6665 * @opcode 0xd6
6666 */
6667FNIEMOP_DEF(iemOp_salc)
6668{
6669 IEMOP_MNEMONIC(salc, "salc");
6670 IEMOP_HLP_MIN_286(); /* (undocument at the time) */
6671 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6672 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6673 IEMOP_HLP_NO_64BIT();
6674
6675 IEM_MC_BEGIN(0, 0);
6676 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
6677 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0xff);
6678 } IEM_MC_ELSE() {
6679 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0x00);
6680 } IEM_MC_ENDIF();
6681 IEM_MC_ADVANCE_RIP();
6682 IEM_MC_END();
6683 return VINF_SUCCESS;
6684}
6685
6686
6687/**
6688 * @opcode 0xd7
6689 */
6690FNIEMOP_DEF(iemOp_xlat)
6691{
6692 IEMOP_MNEMONIC(xlat, "xlat");
6693 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6694 switch (pVCpu->iem.s.enmEffAddrMode)
6695 {
6696 case IEMMODE_16BIT:
6697 IEM_MC_BEGIN(2, 0);
6698 IEM_MC_LOCAL(uint8_t, u8Tmp);
6699 IEM_MC_LOCAL(uint16_t, u16Addr);
6700 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Addr, X86_GREG_xAX);
6701 IEM_MC_ADD_GREG_U16_TO_LOCAL(u16Addr, X86_GREG_xBX);
6702 IEM_MC_FETCH_MEM16_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u16Addr);
6703 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
6704 IEM_MC_ADVANCE_RIP();
6705 IEM_MC_END();
6706 return VINF_SUCCESS;
6707
6708 case IEMMODE_32BIT:
6709 IEM_MC_BEGIN(2, 0);
6710 IEM_MC_LOCAL(uint8_t, u8Tmp);
6711 IEM_MC_LOCAL(uint32_t, u32Addr);
6712 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Addr, X86_GREG_xAX);
6713 IEM_MC_ADD_GREG_U32_TO_LOCAL(u32Addr, X86_GREG_xBX);
6714 IEM_MC_FETCH_MEM32_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u32Addr);
6715 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
6716 IEM_MC_ADVANCE_RIP();
6717 IEM_MC_END();
6718 return VINF_SUCCESS;
6719
6720 case IEMMODE_64BIT:
6721 IEM_MC_BEGIN(2, 0);
6722 IEM_MC_LOCAL(uint8_t, u8Tmp);
6723 IEM_MC_LOCAL(uint64_t, u64Addr);
6724 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Addr, X86_GREG_xAX);
6725 IEM_MC_ADD_GREG_U64_TO_LOCAL(u64Addr, X86_GREG_xBX);
6726 IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u64Addr);
6727 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
6728 IEM_MC_ADVANCE_RIP();
6729 IEM_MC_END();
6730 return VINF_SUCCESS;
6731
6732 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6733 }
6734}
6735
6736
6737/**
6738 * Common worker for FPU instructions working on ST0 and STn, and storing the
6739 * result in ST0.
6740 *
6741 * @param pfnAImpl Pointer to the instruction implementation (assembly).
6742 */
6743FNIEMOP_DEF_2(iemOpHlpFpu_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
6744{
6745 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6746
6747 IEM_MC_BEGIN(3, 1);
6748 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
6749 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
6750 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
6751 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
6752
6753 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
6754 IEM_MC_MAYBE_RAISE_FPU_XCPT();
6755 IEM_MC_PREPARE_FPU_USAGE();
6756 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
6757 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
6758 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
6759 IEM_MC_ELSE()
6760 IEM_MC_FPU_STACK_UNDERFLOW(0);
6761 IEM_MC_ENDIF();
6762 IEM_MC_ADVANCE_RIP();
6763
6764 IEM_MC_END();
6765 return VINF_SUCCESS;
6766}
6767
6768
6769/**
6770 * Common worker for FPU instructions working on ST0 and STn, and only affecting
6771 * flags.
6772 *
6773 * @param pfnAImpl Pointer to the instruction implementation (assembly).
6774 */
6775FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
6776{
6777 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6778
6779 IEM_MC_BEGIN(3, 1);
6780 IEM_MC_LOCAL(uint16_t, u16Fsw);
6781 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
6782 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
6783 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
6784
6785 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
6786 IEM_MC_MAYBE_RAISE_FPU_XCPT();
6787 IEM_MC_PREPARE_FPU_USAGE();
6788 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
6789 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
6790 IEM_MC_UPDATE_FSW(u16Fsw);
6791 IEM_MC_ELSE()
6792 IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
6793 IEM_MC_ENDIF();
6794 IEM_MC_ADVANCE_RIP();
6795
6796 IEM_MC_END();
6797 return VINF_SUCCESS;
6798}
6799
6800
6801/**
6802 * Common worker for FPU instructions working on ST0 and STn, only affecting
6803 * flags, and popping when done.
6804 *
6805 * @param pfnAImpl Pointer to the instruction implementation (assembly).
6806 */
6807FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
6808{
6809 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6810
6811 IEM_MC_BEGIN(3, 1);
6812 IEM_MC_LOCAL(uint16_t, u16Fsw);
6813 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
6814 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
6815 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
6816
6817 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
6818 IEM_MC_MAYBE_RAISE_FPU_XCPT();
6819 IEM_MC_PREPARE_FPU_USAGE();
6820 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
6821 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
6822 IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
6823 IEM_MC_ELSE()
6824 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(UINT8_MAX);
6825 IEM_MC_ENDIF();
6826 IEM_MC_ADVANCE_RIP();
6827
6828 IEM_MC_END();
6829 return VINF_SUCCESS;
6830}
6831
6832
/** Opcode 0xd8 11/0.  fadd st0,stN - ST0 is both source and destination. */
FNIEMOP_DEF_1(iemOp_fadd_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_stN, "fadd st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fadd_r80_by_r80);
}
6839
6840
/** Opcode 0xd8 11/1.  fmul st0,stN - ST0 is both source and destination. */
FNIEMOP_DEF_1(iemOp_fmul_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_stN, "fmul st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fmul_r80_by_r80);
}
6847
6848
/** Opcode 0xd8 11/2.  fcom st0,stN - compare only, updates FSW. */
FNIEMOP_DEF_1(iemOp_fcom_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_stN, "fcom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fcom_r80_by_r80);
}
6855
6856
/** Opcode 0xd8 11/3.  fcomp st0,stN - same compare as fcom but pops ST0
 *  afterwards (uses the fcom implementation with the popping worker). */
FNIEMOP_DEF_1(iemOp_fcomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_stN, "fcomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fcom_r80_by_r80);
}
6863
6864
/** Opcode 0xd8 11/4.  fsub st0,stN - ST0 = ST0 - STn. */
FNIEMOP_DEF_1(iemOp_fsub_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_stN, "fsub st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsub_r80_by_r80);
}
6871
6872
/** Opcode 0xd8 11/5.  fsubr st0,stN - reversed subtraction, result in ST0. */
FNIEMOP_DEF_1(iemOp_fsubr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_stN, "fsubr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsubr_r80_by_r80);
}
6879
6880
/** Opcode 0xd8 11/6.  fdiv st0,stN - ST0 = ST0 / STn. */
FNIEMOP_DEF_1(iemOp_fdiv_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_stN, "fdiv st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdiv_r80_by_r80);
}
6887
6888
/** Opcode 0xd8 11/7.  fdivr st0,stN - reversed division, result in ST0. */
FNIEMOP_DEF_1(iemOp_fdivr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_stN, "fdivr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdivr_r80_by_r80);
}
6895
6896
6897/**
6898 * Common worker for FPU instructions working on ST0 and an m32r, and storing
6899 * the result in ST0.
6900 *
6901 * @param pfnAImpl Pointer to the instruction implementation (assembly).
6902 */
6903FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32r, uint8_t, bRm, PFNIEMAIMPLFPUR32, pfnAImpl)
6904{
6905 IEM_MC_BEGIN(3, 3);
6906 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6907 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
6908 IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
6909 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
6910 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
6911 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);
6912
6913 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6914 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6915
6916 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
6917 IEM_MC_MAYBE_RAISE_FPU_XCPT();
6918 IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6919
6920 IEM_MC_PREPARE_FPU_USAGE();
6921 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
6922 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr32Val2);
6923 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
6924 IEM_MC_ELSE()
6925 IEM_MC_FPU_STACK_UNDERFLOW(0);
6926 IEM_MC_ENDIF();
6927 IEM_MC_ADVANCE_RIP();
6928
6929 IEM_MC_END();
6930 return VINF_SUCCESS;
6931}
6932
6933
/** Opcode 0xd8 !11/0.  fadd st0,m32r - ST0 += 32-bit real from memory. */
FNIEMOP_DEF_1(iemOp_fadd_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_m32r, "fadd st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fadd_r80_by_r32);
}
6940
6941
/** Opcode 0xd8 !11/1.  fmul st0,m32r - ST0 *= 32-bit real from memory. */
FNIEMOP_DEF_1(iemOp_fmul_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_m32r, "fmul st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fmul_r80_by_r32);
}
6948
6949
/** Opcode 0xd8 !11/2.  fcom st0,m32r - compare ST0 against a 32-bit real from
 *  memory; only the FSW is updated (open-coded since the memory-op FSW/
 *  underflow variants are needed, not the stN worker). */
FNIEMOP_DEF_1(iemOp_fcom_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m32r, "fcom st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U,            r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U,  pr32Val2,   r32Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
6982
6983
/** Opcode 0xd8 !11/3.  fcomp st0,m32r - same as fcom st0,m32r, but pops the
 *  register stack after updating the FSW. */
FNIEMOP_DEF_1(iemOp_fcomp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m32r, "fcomp st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U,            r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U,  pr32Val2,   r32Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7016
7017
/** Opcode 0xd8 !11/4.  fsub st0,m32r - ST0 -= 32-bit real from memory. */
FNIEMOP_DEF_1(iemOp_fsub_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_m32r, "fsub st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsub_r80_by_r32);
}
7024
7025
/** Opcode 0xd8 !11/5.  fsubr st0,m32r - reversed subtraction, result in ST0. */
FNIEMOP_DEF_1(iemOp_fsubr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_m32r, "fsubr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsubr_r80_by_r32);
}
7032
7033
/** Opcode 0xd8 !11/6.  fdiv st0,m32r - ST0 /= 32-bit real from memory. */
FNIEMOP_DEF_1(iemOp_fdiv_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_m32r, "fdiv st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdiv_r80_by_r32);
}
7040
7041
/** Opcode 0xd8 !11/7.  fdivr st0,m32r - reversed division, result in ST0. */
FNIEMOP_DEF_1(iemOp_fdivr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_m32r, "fdivr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdivr_r80_by_r32);
}
7048
7049
7050/**
7051 * @opcode 0xd8
7052 */
7053FNIEMOP_DEF(iemOp_EscF0)
7054{
7055 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7056 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd8 & 0x7);
7057
7058 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7059 {
7060 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
7061 {
7062 case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN, bRm);
7063 case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN, bRm);
7064 case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm);
7065 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
7066 case 4: return FNIEMOP_CALL_1(iemOp_fsub_stN, bRm);
7067 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_stN, bRm);
7068 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_stN, bRm);
7069 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_stN, bRm);
7070 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7071 }
7072 }
7073 else
7074 {
7075 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
7076 {
7077 case 0: return FNIEMOP_CALL_1(iemOp_fadd_m32r, bRm);
7078 case 1: return FNIEMOP_CALL_1(iemOp_fmul_m32r, bRm);
7079 case 2: return FNIEMOP_CALL_1(iemOp_fcom_m32r, bRm);
7080 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m32r, bRm);
7081 case 4: return FNIEMOP_CALL_1(iemOp_fsub_m32r, bRm);
7082 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m32r, bRm);
7083 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m32r, bRm);
7084 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m32r, bRm);
7085 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7086 }
7087 }
7088}
7089
7090
/** Opcode 0xd9 /0 mem32real
 *
 * fld m32r: converts a 32-bit real from memory to 80-bit and pushes it onto
 * the FPU stack.  If the incoming top-1 slot (ST7) is occupied, a stack
 * push overflow is recorded instead.
 * @sa iemOp_fld_m64r */
FNIEMOP_DEF_1(iemOp_fld_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m32r, "fld m32r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U,            r32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U,  pr32Val,    r32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7) /* the slot the push will land in */
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r32_to_r80, pFpuRes, pr32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7123
7124
/** Opcode 0xd9 !11/2 mem32real
 *
 * fst m32r: stores ST0 to memory as a 32-bit real.  On an empty ST0, a
 * negative QNaN is written when the invalid-operation exception is masked
 * (FCW.IM set) and a stack underflow is recorded. */
FNIEMOP_DEF_1(iemOp_fst_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m32r, "fst m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U,             pr32Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        /* Commit respects pending FPU exceptions recorded in u16Fsw. */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7159
7160
/** Opcode 0xd9 !11/3
 *
 * fstp m32r: same as fst m32r, but pops the register stack after the
 * store / underflow handling. */
FNIEMOP_DEF_1(iemOp_fstp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m32r, "fstp m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U,             pr32Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7195
7196
/** Opcode 0xd9 !11/4
 *
 * fldenv m14/28byte: loads the FPU environment from memory; the layout
 * (14 vs 28 bytes) depends on the effective operand size.  Deferred to a C
 * implementation. */
FNIEMOP_DEF_1(iemOp_fldenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fldenv, "fldenv m14/28byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE,           enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize,  0);
    IEM_MC_ARG(uint8_t,                 iEffSeg,                                        1);
    IEM_MC_ARG(RTGCPTR,                 GCPtrEffSrc,                                    2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fldenv, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
7214
7215
7216/** Opcode 0xd9 !11/5 */
7217FNIEMOP_DEF_1(iemOp_fldcw, uint8_t, bRm)
7218{
7219 IEMOP_MNEMONIC(fldcw_m2byte, "fldcw m2byte");
7220 IEM_MC_BEGIN(1, 1);
7221 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7222 IEM_MC_ARG(uint16_t, u16Fsw, 0);
7223 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7224 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7225 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7226 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7227 IEM_MC_FETCH_MEM_U16(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
7228 IEM_MC_CALL_CIMPL_1(iemCImpl_fldcw, u16Fsw);
7229 IEM_MC_END();
7230 return VINF_SUCCESS;
7231}
7232
7233
/** Opcode 0xd9 !11/6
 *
 * fnstenv m14/m28byte: stores the FPU environment to memory; the layout
 * depends on the effective operand size.  Deferred to a C implementation. */
FNIEMOP_DEF_1(iemOp_fnstenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstenv, "fstenv m14/m28byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE,           enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize,  0);
    IEM_MC_ARG(uint8_t,                 iEffSeg,                                        1);
    IEM_MC_ARG(RTGCPTR,                 GCPtrEffDst,                                    2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fnstenv, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}
7251
7252
/** Opcode 0xd9 !11/7
 *
 * fnstcw m2byte: stores the FPU control word to memory. */
FNIEMOP_DEF_1(iemOp_fnstcw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstcw_m2byte, "fnstcw m2byte");
    IEM_MC_BEGIN(2, 0);
    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fcw);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FCW(u16Fcw);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Fcw);
    IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
    return VINF_SUCCESS;
}
7270
7271
/** Opcode 0xd9 0xd0, 0xd9 0xd8-0xdf, ++?.
 *
 * fnop: no arithmetic effect, but still checks \#NM / pending FPU exceptions
 * and updates the FPU opcode/instruction pointer. */
FNIEMOP_DEF(iemOp_fnop)
{
    IEMOP_MNEMONIC(fnop, "fnop");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /** @todo Testcase: looks like FNOP leaves FOP alone but updates FPUIP. Could be
     *        intel optimizations. Investigate. */
    IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
    return VINF_SUCCESS;
}
7289
7290
/** Opcode 0xd9 11/0 stN
 *
 * fld stN: pushes a copy of STn onto the FPU stack; a stack push underflow
 * is recorded if STn is empty. */
FNIEMOP_DEF_1(iemOp_fld_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_stN, "fld stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF?  Intel mentioned it not.  AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, bRm & X86_MODRM_RM_MASK)
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_PUSH_FPU_RESULT(FpuRes);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW();
    IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
7318
7319
/** Opcode 0xd9 11/3 stN
 * FXCH ST(i) - exchanges ST0 and ST(i); empty-register cases go to the
 * iemCImpl_fxch_underflow C worker.
 */
FNIEMOP_DEF_1(iemOp_fxch_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fxch_stN, "fxch stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(1, 3);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value2);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_CONST(uint8_t, iStReg, /*=*/ bRm & X86_MODRM_RM_MASK, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        /* Both registers valid: write old ST(i) to ST0 (via FpuRes, FSW.C1 set)
           and old ST0 to ST(i). */
        IEM_MC_SET_FPU_RESULT(FpuRes, X86_FSW_C1, pr80Value2);
        IEM_MC_STORE_FPUREG_R80_SRC_REF(bRm & X86_MODRM_RM_MASK, pr80Value1);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        /* One or both registers empty: let the C implementation handle underflow. */
        IEM_MC_CALL_CIMPL_1(iemCImpl_fxch_underflow, iStReg);
    IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
7350
7351
/** Opcode 0xd9 11/4, 0xdd 11/2.
 * FSTP ST0,ST(i) - copies ST0 into ST(i) and pops the stack; the ST(0)
 * destination form degenerates to a plain pop.
 */
FNIEMOP_DEF_1(iemOp_fstp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_st0_stN, "fstp st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* fstp st0, st0 is frequently used as an official 'ffreep st0' sequence. */
    uint8_t const iDstReg = bRm & X86_MODRM_RM_MASK;
    if (!iDstReg)
    {
        /* Destination is ST0 itself: nothing to copy, just pop (or underflow). */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL_CONST(uint16_t, u16Fsw, /*=*/ 0);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY(0)
            IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
        IEM_MC_ELSE()
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(0);
        IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* General case: copy ST0 to ST(i), then pop. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
        IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
            IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
            IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, iDstReg);
        IEM_MC_ELSE()
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(iDstReg);
        IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
7398
7399
/**
 * Common worker for FPU instructions working on ST0 and replaces it with the
 * result, i.e. unary operators.
 *
 * Raises \#NM / pending FPU exceptions as appropriate; when ST0 is empty the
 * assembly worker is skipped and a stack-underflow result is stored instead.
 *
 * @returns Strict VBox status code (VINF_SUCCESS on the normal path).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpu_st0, PFNIEMAIMPLFPUR80UNARY, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(2, 1);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        /* ST0 valid: run the assembly worker and write the result back to ST0. */
        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuRes, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7429
7430
/** Opcode 0xd9 0xe0. FCHS - negate the sign of ST0 (unary worker). */
FNIEMOP_DEF(iemOp_fchs)
{
    IEMOP_MNEMONIC(fchs_st0, "fchs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fchs_r80);
}
7437
7438
/** Opcode 0xd9 0xe1. FABS - replace ST0 with its absolute value (unary worker). */
FNIEMOP_DEF(iemOp_fabs)
{
    IEMOP_MNEMONIC(fabs_st0, "fabs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fabs_r80);
}
7445
7446
/**
 * Common worker for FPU instructions working on ST0 and only returns FSW.
 *
 * The assembly worker examines ST0 and produces only a new FSW value (no
 * register writes); underflow is signalled with an unspecified (UINT8_MAX)
 * register index.
 *
 * @returns Strict VBox status code (VINF_SUCCESS on the normal path).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0, PFNIEMAIMPLFPUR80UNARYFSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(2, 1);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pu16Fsw, pr80Value);
        IEM_MC_UPDATE_FSW(u16Fsw);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);  /* No destination register to flag. */
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7475
7476
/** Opcode 0xd9 0xe4. FTST - compare ST0 against 0.0, result in FSW only. */
FNIEMOP_DEF(iemOp_ftst)
{
    IEMOP_MNEMONIC(ftst_st0, "ftst st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_ftst_r80);
}
7483
7484
/** Opcode 0xd9 0xe5. FXAM - classify ST0, result in FSW condition codes only. */
FNIEMOP_DEF(iemOp_fxam)
{
    IEMOP_MNEMONIC(fxam_st0, "fxam st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_fxam_r80);
}
7491
7492
/**
 * Common worker for FPU instructions pushing a constant onto the FPU stack.
 *
 * Checks that the register which would become the new top (relative index 7)
 * is empty; otherwise a stack-overflow result is pushed instead of the
 * constant.
 *
 * @returns Strict VBox status code (VINF_SUCCESS on the normal path).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuPushConstant, PFNIEMAIMPLFPUR80LDCONST, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(1, 1);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7)            /* Register that will receive the push. */
        IEM_MC_CALL_FPU_AIMPL_1(pfnAImpl, pFpuRes);
        IEM_MC_PUSH_FPU_RESULT(FpuRes);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW();
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7520
7521
/** Opcode 0xd9 0xe8. FLD1 - push the constant +1.0. */
FNIEMOP_DEF(iemOp_fld1)
{
    IEMOP_MNEMONIC(fld1, "fld1");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fld1);
}
7528
7529
/** Opcode 0xd9 0xe9. FLDL2T - push the constant log2(10). */
FNIEMOP_DEF(iemOp_fldl2t)
{
    IEMOP_MNEMONIC(fldl2t, "fldl2t");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2t);
}
7536
7537
/** Opcode 0xd9 0xea. FLDL2E - push the constant log2(e). */
FNIEMOP_DEF(iemOp_fldl2e)
{
    IEMOP_MNEMONIC(fldl2e, "fldl2e");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2e);
}
7544
/** Opcode 0xd9 0xeb. FLDPI - push the constant pi. */
FNIEMOP_DEF(iemOp_fldpi)
{
    IEMOP_MNEMONIC(fldpi, "fldpi");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldpi);
}
7551
7552
/** Opcode 0xd9 0xec. FLDLG2 - push the constant log10(2). */
FNIEMOP_DEF(iemOp_fldlg2)
{
    IEMOP_MNEMONIC(fldlg2, "fldlg2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldlg2);
}
7559
/** Opcode 0xd9 0xed. FLDLN2 - push the constant ln(2). */
FNIEMOP_DEF(iemOp_fldln2)
{
    IEMOP_MNEMONIC(fldln2, "fldln2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldln2);
}
7566
7567
/** Opcode 0xd9 0xee. FLDZ - push the constant +0.0. */
FNIEMOP_DEF(iemOp_fldz)
{
    IEMOP_MNEMONIC(fldz, "fldz");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldz);
}
7574
7575
/** Opcode 0xd9 0xf0. F2XM1 - replace ST0 with 2^ST0 - 1 (unary worker). */
FNIEMOP_DEF(iemOp_f2xm1)
{
    IEMOP_MNEMONIC(f2xm1_st0, "f2xm1 st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_f2xm1_r80);
}
7582
7583
/**
 * Common worker for FPU instructions working on STn and ST0, storing the result
 * in STn, and popping the stack unless IE, DE or ZE was raised.
 *
 * @returns Strict VBox status code (VINF_SUCCESS on the normal path).
 * @param   bRm         The ModR/M byte; low 3 bits select ST(n).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
        /* op1 = ST(n), op2 = ST0; result written to ST(n) before the pop. */
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, bRm & X86_MODRM_RM_MASK);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(bRm & X86_MODRM_RM_MASK);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7615
7616
/** Opcode 0xd9 0xf1. FYL2X - ST1 := ST1 * log2(ST0), then pop. */
FNIEMOP_DEF(iemOp_fyl2x)
{
    IEMOP_MNEMONIC(fyl2x_st0, "fyl2x st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2x_r80_by_r80);
}
7623
7624
/**
 * Common worker for FPU instructions working on ST0 and having two outputs, one
 * replacing ST0 and one pushed onto the stack.
 *
 * @returns Strict VBox status code (VINF_SUCCESS on the normal path).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuReplace_st0_push, PFNIEMAIMPLFPUR80UNARYTWO, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(2, 1);
    IEM_MC_LOCAL(IEMFPURESULTTWO, FpuResTwo);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULTTWO, pFpuResTwo, FpuResTwo, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        /* Worker fills a two-value result; one replaces ST0, one is pushed. */
        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuResTwo, pr80Value);
        IEM_MC_PUSH_FPU_RESULT_TWO(FpuResTwo);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW_TWO();
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7654
7655
/** Opcode 0xd9 0xf2. FPTAN - two-output worker (replaces ST0, pushes one value). */
FNIEMOP_DEF(iemOp_fptan)
{
    IEMOP_MNEMONIC(fptan_st0, "fptan st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fptan_r80_r80);
}
7662
7663
/** Opcode 0xd9 0xf3. FPATAN - ST1 := arctan(ST1/ST0), then pop. */
FNIEMOP_DEF(iemOp_fpatan)
{
    IEMOP_MNEMONIC(fpatan_st1_st0, "fpatan st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fpatan_r80_by_r80);
}
7670
7671
/** Opcode 0xd9 0xf4. FXTRACT - two-output worker (exponent/significand split). */
FNIEMOP_DEF(iemOp_fxtract)
{
    IEMOP_MNEMONIC(fxtract_st0, "fxtract st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fxtract_r80_r80);
}
7678
7679
/** Opcode 0xd9 0xf5. FPREM1 - IEEE partial remainder of ST0 by ST1, result in ST0.
 * (Uses the iemOpHlpFpu_st0_stN worker defined earlier in this file.) */
FNIEMOP_DEF(iemOp_fprem1)
{
    IEMOP_MNEMONIC(fprem1_st0_st1, "fprem1 st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem1_r80_by_r80);
}
7686
7687
/** Opcode 0xd9 0xf6.
 * FDECSTP - decrement the FPU stack top pointer; no registers are changed.
 */
FNIEMOP_DEF(iemOp_fdecstp)
{
    IEMOP_MNEMONIC(fdecstp, "fdecstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_DEC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0);     /* Clears the C0/C2/C3 condition bits. */

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
7710
7711
/** Opcode 0xd9 0xf7.
 * FINCSTP - increment the FPU stack top pointer; no registers are changed.
 */
FNIEMOP_DEF(iemOp_fincstp)
{
    IEMOP_MNEMONIC(fincstp, "fincstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0);     /* Clears the C0/C2/C3 condition bits. */

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
7734
7735
/** Opcode 0xd9 0xf8. FPREM - partial remainder of ST0 by ST1, result in ST0.
 * (Uses the iemOpHlpFpu_st0_stN worker defined earlier in this file.) */
FNIEMOP_DEF(iemOp_fprem)
{
    IEMOP_MNEMONIC(fprem_st0_st1, "fprem st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem_r80_by_r80);
}
7742
7743
/** Opcode 0xd9 0xf9. FYL2XP1 - ST1 := ST1 * log2(ST0 + 1), then pop. */
FNIEMOP_DEF(iemOp_fyl2xp1)
{
    IEMOP_MNEMONIC(fyl2xp1_st1_st0, "fyl2xp1 st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2xp1_r80_by_r80);
}
7750
7751
/** Opcode 0xd9 0xfa. FSQRT - replace ST0 with its square root (unary worker). */
FNIEMOP_DEF(iemOp_fsqrt)
{
    IEMOP_MNEMONIC(fsqrt_st0, "fsqrt st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsqrt_r80);
}
7758
7759
/** Opcode 0xd9 0xfb. FSINCOS - two-output worker (replaces ST0, pushes one value). */
FNIEMOP_DEF(iemOp_fsincos)
{
    IEMOP_MNEMONIC(fsincos_st0, "fsincos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fsincos_r80_r80);
}
7766
7767
/** Opcode 0xd9 0xfc. FRNDINT - round ST0 to integer per FCW.RC (unary worker). */
FNIEMOP_DEF(iemOp_frndint)
{
    IEMOP_MNEMONIC(frndint_st0, "frndint st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_frndint_r80);
}
7774
7775
/** Opcode 0xd9 0xfd. FSCALE - scale ST0 by ST1, result in ST0.
 * (Uses the iemOpHlpFpu_st0_stN worker defined earlier in this file.) */
FNIEMOP_DEF(iemOp_fscale)
{
    IEMOP_MNEMONIC(fscale_st0_st1, "fscale st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fscale_r80_by_r80);
}
7782
7783
/** Opcode 0xd9 0xfe. FSIN - replace ST0 with sin(ST0) (unary worker). */
FNIEMOP_DEF(iemOp_fsin)
{
    IEMOP_MNEMONIC(fsin_st0, "fsin st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsin_r80);
}
7790
7791
/** Opcode 0xd9 0xff. FCOS - replace ST0 with cos(ST0) (unary worker). */
FNIEMOP_DEF(iemOp_fcos)
{
    IEMOP_MNEMONIC(fcos_st0, "fcos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fcos_r80);
}
7798
7799
/** Used by iemOp_EscF1.
 * Dispatch table for the register forms 0xe0 through 0xff of escape
 * opcode 0xd9; indexed by (ModR/M byte - 0xe0). Unassigned encodings
 * map to iemOp_Invalid. */
IEM_STATIC const PFNIEMOP g_apfnEscF1_E0toFF[32] =
{
    /* 0xe0 */  iemOp_fchs,
    /* 0xe1 */  iemOp_fabs,
    /* 0xe2 */  iemOp_Invalid,
    /* 0xe3 */  iemOp_Invalid,
    /* 0xe4 */  iemOp_ftst,
    /* 0xe5 */  iemOp_fxam,
    /* 0xe6 */  iemOp_Invalid,
    /* 0xe7 */  iemOp_Invalid,
    /* 0xe8 */  iemOp_fld1,
    /* 0xe9 */  iemOp_fldl2t,
    /* 0xea */  iemOp_fldl2e,
    /* 0xeb */  iemOp_fldpi,
    /* 0xec */  iemOp_fldlg2,
    /* 0xed */  iemOp_fldln2,
    /* 0xee */  iemOp_fldz,
    /* 0xef */  iemOp_Invalid,
    /* 0xf0 */  iemOp_f2xm1,
    /* 0xf1 */  iemOp_fyl2x,
    /* 0xf2 */  iemOp_fptan,
    /* 0xf3 */  iemOp_fpatan,
    /* 0xf4 */  iemOp_fxtract,
    /* 0xf5 */  iemOp_fprem1,
    /* 0xf6 */  iemOp_fdecstp,
    /* 0xf7 */  iemOp_fincstp,
    /* 0xf8 */  iemOp_fprem,
    /* 0xf9 */  iemOp_fyl2xp1,
    /* 0xfa */  iemOp_fsqrt,
    /* 0xfb */  iemOp_fsincos,
    /* 0xfc */  iemOp_frndint,
    /* 0xfd */  iemOp_fscale,
    /* 0xfe */  iemOp_fsin,
    /* 0xff */  iemOp_fcos
};
7836
7837
/**
 * @opcode 0xd9
 *
 * Escape opcode 0xd9 decoder: fetches the ModR/M byte, records it for the
 * FPU opcode bookkeeping, and dispatches to the register-form (mod == 3)
 * or memory-form x87 handlers.
 */
FNIEMOP_DEF(iemOp_EscF1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Remember the low opcode bits + ModR/M for the FPU opcode (FOP) state. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd9 & 0x7);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register forms: dispatch on the reg field; /4../7 via the byte table. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm);
            case 2:
                if (bRm == 0xd0)
                    return FNIEMOP_CALL(iemOp_fnop);
                return IEMOP_RAISE_INVALID_OPCODE();
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved. Intel behavior seems to be FSTP ST(i) though. */
            case 4:
            case 5:
            case 6:
            case 7:
                Assert((unsigned)bRm - 0xe0U < RT_ELEMENTS(g_apfnEscF1_E0toFF));
                return FNIEMOP_CALL(g_apfnEscF1_E0toFF[bRm - 0xe0]);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory forms: m32r loads/stores and environment/control-word ops. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m32r, bRm);
            case 1: return IEMOP_RAISE_INVALID_OPCODE();
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m32r, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fldenv, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fldcw, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fnstenv, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstcw, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
7882
7883
/** Opcode 0xda 11/0.
 * FCMOVB ST0,ST(i) - copy ST(i) to ST0 when EFLAGS.CF is set (below).
 */
FNIEMOP_DEF_1(iemOp_fcmovb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovb_st0_stN, "fcmovb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();  /* Bookkeeping even when the move is skipped. */
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7910
7911
/** Opcode 0xda 11/1.
 * FCMOVE ST0,ST(i) - copy ST(i) to ST0 when EFLAGS.ZF is set (equal).
 */
FNIEMOP_DEF_1(iemOp_fcmove_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmove_st0_stN, "fcmove st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();  /* Bookkeeping even when the move is skipped. */
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7938
7939
/** Opcode 0xda 11/2.
 * FCMOVBE ST0,ST(i) - copy ST(i) to ST0 when CF or ZF is set (below-or-equal).
 */
FNIEMOP_DEF_1(iemOp_fcmovbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovbe_st0_stN, "fcmovbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();  /* Bookkeeping even when the move is skipped. */
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7966
7967
/** Opcode 0xda 11/3.
 * FCMOVU ST0,ST(i) - copy ST(i) to ST0 when EFLAGS.PF is set (unordered).
 */
FNIEMOP_DEF_1(iemOp_fcmovu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovu_st0_stN, "fcmovu st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();  /* Bookkeeping even when the move is skipped. */
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7994
7995
/**
 * Common worker for FPU instructions working on ST0 and STn, only affecting
 * flags, and popping twice when done.
 *
 * Note: the second operand is hard-coded to ST1 here (see the not-empty
 * check below), matching the double-pop compare encodings that use it.
 *
 * @returns Strict VBox status code (VINF_SUCCESS on the normal path).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, 1)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW_THEN_POP_POP(u16Fsw);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP_POP();
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8027
8028
/** Opcode 0xda 0xe9.
 * FUCOMPP - unordered compare ST0 with ST1, then pop both.
 * NOTE(review): the mnemonic text says "st0,stN" but this fixed encoding
 * always compares against ST1 (see the pop-pop worker) - confirm intent. */
FNIEMOP_DEF(iemOp_fucompp)
{
    IEMOP_MNEMONIC(fucompp_st0_stN, "fucompp st0,stN");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fucom_r80_by_r80);
}
8035
8036
/**
 * Common worker for FPU instructions working on ST0 and an m32i, and storing
 * the result in ST0.
 *
 * Fetches a 32-bit signed integer operand from memory, applies the worker to
 * (ST0, m32i) and stores the result back in ST0.
 *
 * @returns Strict VBox status code (VINF_SUCCESS on the normal path).
 * @param   bRm         The ModR/M byte (memory form).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32i, uint8_t, bRm, PFNIEMAIMPLFPUI32, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi32Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8072
8073
/** Opcode 0xda !11/0. FIADD m32i - ST0 := ST0 + m32i. */
FNIEMOP_DEF_1(iemOp_fiadd_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m32i, "fiadd m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fiadd_r80_by_i32);
}
8080
8081
/** Opcode 0xda !11/1. FIMUL m32i - ST0 := ST0 * m32i. */
FNIEMOP_DEF_1(iemOp_fimul_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m32i, "fimul m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fimul_r80_by_i32);
}
8088
8089
/** Opcode 0xda !11/2.
 * FICOM ST0,m32i - compare ST0 with a 32-bit memory integer; FSW only.
 */
FNIEMOP_DEF_1(iemOp_ficom_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m32i, "ficom st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        /* Record the memory operand info along with the new FSW. */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8122
8123
/** Opcode 0xda !11/3.
 * FICOMP ST0,m32i - like FICOM m32i, but pops ST0 afterwards.
 */
FNIEMOP_DEF_1(iemOp_ficomp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m32i, "ficomp st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        /* Same compare worker as FICOM, but the FSW update also pops. */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8156
8157
/** Opcode 0xda !11/4. FISUB m32i - ST0 := ST0 - m32i. */
FNIEMOP_DEF_1(iemOp_fisub_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m32i, "fisub m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisub_r80_by_i32);
}
8164
8165
/** Opcode 0xda !11/5. FISUBR m32i - ST0 := m32i - ST0 (reversed subtract). */
FNIEMOP_DEF_1(iemOp_fisubr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m32i, "fisubr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisubr_r80_by_i32);
}
8172
8173
/** Opcode 0xda !11/6. FIDIV m32i - ST0 := ST0 / m32i. */
FNIEMOP_DEF_1(iemOp_fidiv_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m32i, "fidiv m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidiv_r80_by_i32);
}
8180
8181
/** Opcode 0xda !11/7. FIDIVR m32i - ST0 := m32i / ST0 (reversed divide). */
FNIEMOP_DEF_1(iemOp_fidivr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m32i, "fidivr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidivr_r80_by_i32);
}
8188
8189
/**
 * @opcode 0xda
 *
 * Escape opcode 0xda decoder: register forms are the FCMOVcc family (plus
 * FUCOMPP at 0xe9); memory forms are the 32-bit integer arithmetic/compare
 * operations.
 */
FNIEMOP_DEF(iemOp_EscF2)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Remember the low opcode bits + ModR/M for the FPU opcode (FOP) state. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xda & 0x7);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovb_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmove_stN,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovu_stN,  bRm);
            case 4: return IEMOP_RAISE_INVALID_OPCODE();
            case 5:
                if (bRm == 0xe9)
                    return FNIEMOP_CALL(iemOp_fucompp);
                return IEMOP_RAISE_INVALID_OPCODE();
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m32i,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m32i,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m32i,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m32i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m32i,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m32i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m32i,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m32i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8231
8232
/** Opcode 0xdb !11/0.
 * FILD m32i - load a 32-bit memory integer and push it onto the FPU stack.
 */
FNIEMOP_DEF_1(iemOp_fild_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m32i, "fild m32i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int32_t, i32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val, i32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7)    /* Register that will receive the push. */
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i32_to_r80, pFpuRes, pi32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8264
8265
/** Opcode 0xdb !11/1.
 * FISTTP m32i - store ST0 as a 32-bit integer using truncation (worker is
 * iemAImpl_fistt_r80_to_i32), then pop.
 */
FNIEMOP_DEF_1(iemOp_fisttp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m32i, "fisttp m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing before touching the FPU state. */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        /* Commit is FSW-aware: the store may be suppressed on masked faults. */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST0: with \#I masked (FCW.IM), store the integer indefinite. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8300
8301
/** Opcode 0xdb !11/2.
 * FIST m32i - store ST0 as a 32-bit integer (rounded per FCW); no pop.
 */
FNIEMOP_DEF_1(iemOp_fist_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m32i, "fist m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing before touching the FPU state. */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST0: with \#I masked (FCW.IM), store the integer indefinite. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8336
8337
/** Opcode 0xdb !11/3.
 * FISTP m32i - like FIST m32i (rounded per FCW), but pops ST0 afterwards.
 */
FNIEMOP_DEF_1(iemOp_fistp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m32i, "fistp m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing before touching the FPU state. */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST0: with \#I masked (FCW.IM), store the integer indefinite. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8372
8373
/** Opcode 0xdb !11/5.
 * FLD m80r - push an 80-bit real from memory onto the FPU stack.  Pushing
 * requires ST7 (the next stack slot) to be free, otherwise a stack overflow
 * is signalled. */
FNIEMOP_DEF_1(iemOp_fld_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m80r, "fld m80r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT80U, r80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT80U, pr80Val, r80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R80(r80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r80, pFpuRes, pr80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8405
8406
/** Opcode 0xdb !11/7.
 * FSTP m80r - store ST0 as an 80-bit real to memory, then pop.  On an empty
 * ST0, a negative QNaN is stored (if IM is masked) and underflow recorded. */
FNIEMOP_DEF_1(iemOp_fstp_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m80r, "fstp m80r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT80U, pr80Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr80Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r80, pu16Fsw, pr80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr80Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(pr80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr80Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8441
8442
/** Opcode 0xdb 11/0.
 * FCMOVNB ST(0),ST(i) - copy ST(i) to ST0 when CF is clear.  Both registers
 * must be non-empty; otherwise a stack underflow on ST0 is signalled. */
FNIEMOP_DEF_1(iemOp_fcmovnb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnb_st0_stN, "fcmovnb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        /* FOP/FIP are updated even when the condition is false. */
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8469
8470
/** Opcode 0xdb 11/1.
 * FCMOVNE ST(0),ST(i) - copy ST(i) to ST0 when ZF is clear. */
FNIEMOP_DEF_1(iemOp_fcmovne_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovne_st0_stN, "fcmovne st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8497
8498
/** Opcode 0xdb 11/2.
 * FCMOVNBE ST(0),ST(i) - copy ST(i) to ST0 when both CF and ZF are clear. */
FNIEMOP_DEF_1(iemOp_fcmovnbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnbe_st0_stN, "fcmovnbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8525
8526
/** Opcode 0xdb 11/3.
 * FCMOVNU ST(0),ST(i) - copy ST(i) to ST0 when PF is clear (not unordered).
 * NOTE(review): the double 'n' in the identifier/mnemonic ("fcmovnnu") looks
 * like a typo for FCMOVNU - confirm before renaming, as the stats name
 * derives from it. */
FNIEMOP_DEF_1(iemOp_fcmovnnu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnnu_st0_stN, "fcmovnnu st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8553
8554
/** Opcode 0xdb 0xe0.
 * FNENI - 8087-only interrupt enable; treated as a no-op here (only the
 * device-not-available check and RIP advance are performed). */
FNIEMOP_DEF(iemOp_fneni)
{
    IEMOP_MNEMONIC(fneni, "fneni (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8566
8567
/** Opcode 0xdb 0xe1.
 * FNDISI - 8087-only interrupt disable; treated as a no-op here. */
FNIEMOP_DEF(iemOp_fndisi)
{
    IEMOP_MNEMONIC(fndisi, "fndisi (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8579
8580
/** Opcode 0xdb 0xe2.
 * FNCLEX - clear the FPU exception flags in FSW without checking for
 * pending exceptions first (the no-wait form). */
FNIEMOP_DEF(iemOp_fnclex)
{
    IEMOP_MNEMONIC(fnclex, "fnclex");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_CLEAR_FSW_EX();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8595
8596
/** Opcode 0xdb 0xe3.
 * FNINIT - reinitialize the FPU; deferred to the C implementation with
 * exception checking disabled (no-wait form). */
FNIEMOP_DEF(iemOp_fninit)
{
    IEMOP_MNEMONIC(fninit, "fninit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_finit, false /*fCheckXcpts*/);
}
8604
8605
/** Opcode 0xdb 0xe4.
 * FNSETPM - 80287-only; ignored (no-op) on later FPUs, which is what we do. */
FNIEMOP_DEF(iemOp_fnsetpm)
{
    IEMOP_MNEMONIC(fnsetpm, "fnsetpm (80287/ign)");   /* set protected mode on fpu. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8617
8618
/** Opcode 0xdb 0xe5.
 * FRSTPM - 80287XL-only; raises \#UD here since newer CPUs do so (the
 * ignore-as-no-op variant is kept under \#if 0 for reference). */
FNIEMOP_DEF(iemOp_frstpm)
{
    IEMOP_MNEMONIC(frstpm, "frstpm (80287XL/ign)"); /* reset pm, back to real mode. */
#if 0 /* #UDs on newer CPUs */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
#else
    return IEMOP_RAISE_INVALID_OPCODE();
#endif
}
8634
8635
/** Opcode 0xdb 11/5.
 * FUCOMI ST(0),ST(i) - unordered compare setting EFLAGS; no pop.  Deferred
 * to the shared fcomi/fucomi C implementation. */
FNIEMOP_DEF_1(iemOp_fucomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomi_st0_stN, "fucomi st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fucomi_r80_by_r80, false /*fPop*/);
}
8642
8643
/** Opcode 0xdb 11/6.
 * FCOMI ST(0),ST(i) - ordered compare setting EFLAGS; no pop. */
FNIEMOP_DEF_1(iemOp_fcomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomi_st0_stN, "fcomi st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, false /*fPop*/);
}
8650
8651
8652/**
8653 * @opcode 0xdb
8654 */
8655FNIEMOP_DEF(iemOp_EscF3)
8656{
8657 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8658 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdb & 0x7);
8659 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8660 {
8661 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
8662 {
8663 case 0: return FNIEMOP_CALL_1(iemOp_fcmovnb_stN, bRm);
8664 case 1: return FNIEMOP_CALL_1(iemOp_fcmovne_stN, bRm);
8665 case 2: return FNIEMOP_CALL_1(iemOp_fcmovnbe_stN, bRm);
8666 case 3: return FNIEMOP_CALL_1(iemOp_fcmovnnu_stN, bRm);
8667 case 4:
8668 switch (bRm)
8669 {
8670 case 0xe0: return FNIEMOP_CALL(iemOp_fneni);
8671 case 0xe1: return FNIEMOP_CALL(iemOp_fndisi);
8672 case 0xe2: return FNIEMOP_CALL(iemOp_fnclex);
8673 case 0xe3: return FNIEMOP_CALL(iemOp_fninit);
8674 case 0xe4: return FNIEMOP_CALL(iemOp_fnsetpm);
8675 case 0xe5: return FNIEMOP_CALL(iemOp_frstpm);
8676 case 0xe6: return IEMOP_RAISE_INVALID_OPCODE();
8677 case 0xe7: return IEMOP_RAISE_INVALID_OPCODE();
8678 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8679 }
8680 break;
8681 case 5: return FNIEMOP_CALL_1(iemOp_fucomi_stN, bRm);
8682 case 6: return FNIEMOP_CALL_1(iemOp_fcomi_stN, bRm);
8683 case 7: return IEMOP_RAISE_INVALID_OPCODE();
8684 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8685 }
8686 }
8687 else
8688 {
8689 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
8690 {
8691 case 0: return FNIEMOP_CALL_1(iemOp_fild_m32i, bRm);
8692 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m32i,bRm);
8693 case 2: return FNIEMOP_CALL_1(iemOp_fist_m32i, bRm);
8694 case 3: return FNIEMOP_CALL_1(iemOp_fistp_m32i, bRm);
8695 case 4: return IEMOP_RAISE_INVALID_OPCODE();
8696 case 5: return FNIEMOP_CALL_1(iemOp_fld_m80r, bRm);
8697 case 6: return IEMOP_RAISE_INVALID_OPCODE();
8698 case 7: return FNIEMOP_CALL_1(iemOp_fstp_m80r, bRm);
8699 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8700 }
8701 }
8702}
8703
8704
8705/**
8706 * Common worker for FPU instructions working on STn and ST0, and storing the
8707 * result in STn unless IE, DE or ZE was raised.
8708 *
8709 * @param pfnAImpl Pointer to the instruction implementation (assembly).
8710 */
8711FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
8712{
8713 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8714
8715 IEM_MC_BEGIN(3, 1);
8716 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
8717 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
8718 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
8719 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
8720
8721 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8722 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8723
8724 IEM_MC_PREPARE_FPU_USAGE();
8725 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
8726 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
8727 IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
8728 IEM_MC_ELSE()
8729 IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
8730 IEM_MC_ENDIF();
8731 IEM_MC_ADVANCE_RIP();
8732
8733 IEM_MC_END();
8734 return VINF_SUCCESS;
8735}
8736
8737
/** Opcode 0xdc 11/0.
 * FADD ST(i),ST(0) - add ST0 to ST(i), result in ST(i). */
FNIEMOP_DEF_1(iemOp_fadd_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_stN_st0, "fadd stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fadd_r80_by_r80);
}
8744
8745
/** Opcode 0xdc 11/1.
 * FMUL ST(i),ST(0) - multiply ST(i) by ST0, result in ST(i). */
FNIEMOP_DEF_1(iemOp_fmul_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_stN_st0, "fmul stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fmul_r80_by_r80);
}
8752
8753
/** Opcode 0xdc 11/4.
 * FSUBR ST(i),ST(0) - reverse subtract, result in ST(i). */
FNIEMOP_DEF_1(iemOp_fsubr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_stN_st0, "fsubr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsubr_r80_by_r80);
}
8760
8761
/** Opcode 0xdc 11/5.
 * FSUB ST(i),ST(0) - subtract ST0 from ST(i), result in ST(i). */
FNIEMOP_DEF_1(iemOp_fsub_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_stN_st0, "fsub stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsub_r80_by_r80);
}
8768
8769
/** Opcode 0xdc 11/6.
 * FDIVR ST(i),ST(0) - reverse divide, result in ST(i). */
FNIEMOP_DEF_1(iemOp_fdivr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_stN_st0, "fdivr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdivr_r80_by_r80);
}
8776
8777
/** Opcode 0xdc 11/7.
 * FDIV ST(i),ST(0) - divide ST(i) by ST0, result in ST(i). */
FNIEMOP_DEF_1(iemOp_fdiv_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_stN_st0, "fdiv stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdiv_r80_by_r80);
}
8784
8785
8786/**
8787 * Common worker for FPU instructions working on ST0 and a 64-bit floating point
8788 * memory operand, and storing the result in ST0.
8789 *
8790 * @param pfnAImpl Pointer to the instruction implementation (assembly).
8791 */
8792FNIEMOP_DEF_2(iemOpHlpFpu_ST0_m64r, uint8_t, bRm, PFNIEMAIMPLFPUR64, pfnImpl)
8793{
8794 IEM_MC_BEGIN(3, 3);
8795 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8796 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
8797 IEM_MC_LOCAL(RTFLOAT64U, r64Factor2);
8798 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
8799 IEM_MC_ARG(PCRTFLOAT80U, pr80Factor1, 1);
8800 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Factor2, r64Factor2, 2);
8801
8802 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8803 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8804 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8805 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8806
8807 IEM_MC_FETCH_MEM_R64(r64Factor2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
8808 IEM_MC_PREPARE_FPU_USAGE();
8809 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Factor1, 0)
8810 IEM_MC_CALL_FPU_AIMPL_3(pfnImpl, pFpuRes, pr80Factor1, pr64Factor2);
8811 IEM_MC_STORE_FPU_RESULT_MEM_OP(FpuRes, 0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
8812 IEM_MC_ELSE()
8813 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
8814 IEM_MC_ENDIF();
8815 IEM_MC_ADVANCE_RIP();
8816
8817 IEM_MC_END();
8818 return VINF_SUCCESS;
8819}
8820
8821
/** Opcode 0xdc !11/0.
 * FADD m64r - add a 64-bit real memory operand to ST0. */
FNIEMOP_DEF_1(iemOp_fadd_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_m64r, "fadd m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fadd_r80_by_r64);
}
8828
8829
/** Opcode 0xdc !11/1.
 * FMUL m64r - multiply ST0 by a 64-bit real memory operand. */
FNIEMOP_DEF_1(iemOp_fmul_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_m64r, "fmul m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fmul_r80_by_r64);
}
8836
8837
/** Opcode 0xdc !11/2.
 * FCOM m64r - compare ST0 with a 64-bit real memory operand; only the FSW
 * is updated, no register result is stored and nothing is popped. */
FNIEMOP_DEF_1(iemOp_fcom_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m64r, "fcom st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8870
8871
/** Opcode 0xdc !11/3.
 * FCOMP m64r - like FCOM m64r but pops ST0 afterwards. */
FNIEMOP_DEF_1(iemOp_fcomp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m64r, "fcomp st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8904
8905
/** Opcode 0xdc !11/4.
 * FSUB m64r - subtract a 64-bit real memory operand from ST0. */
FNIEMOP_DEF_1(iemOp_fsub_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_m64r, "fsub m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsub_r80_by_r64);
}
8912
8913
/** Opcode 0xdc !11/5.
 * FSUBR m64r - reverse subtract: ST0 = m64r - ST0. */
FNIEMOP_DEF_1(iemOp_fsubr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_m64r, "fsubr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsubr_r80_by_r64);
}
8920
8921
/** Opcode 0xdc !11/6.
 * FDIV m64r - divide ST0 by a 64-bit real memory operand. */
FNIEMOP_DEF_1(iemOp_fdiv_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_m64r, "fdiv m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdiv_r80_by_r64);
}
8928
8929
/** Opcode 0xdc !11/7.
 * FDIVR m64r - reverse divide: ST0 = m64r / ST0. */
FNIEMOP_DEF_1(iemOp_fdivr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_m64r, "fdivr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdivr_r80_by_r64);
}
8936
8937
8938/**
8939 * @opcode 0xdc
8940 */
8941FNIEMOP_DEF(iemOp_EscF4)
8942{
8943 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8944 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdc & 0x7);
8945 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8946 {
8947 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
8948 {
8949 case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN_st0, bRm);
8950 case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN_st0, bRm);
8951 case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm); /* Marked reserved, intel behavior is that of FCOM ST(i). */
8952 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm); /* Marked reserved, intel behavior is that of FCOMP ST(i). */
8953 case 4: return FNIEMOP_CALL_1(iemOp_fsubr_stN_st0, bRm);
8954 case 5: return FNIEMOP_CALL_1(iemOp_fsub_stN_st0, bRm);
8955 case 6: return FNIEMOP_CALL_1(iemOp_fdivr_stN_st0, bRm);
8956 case 7: return FNIEMOP_CALL_1(iemOp_fdiv_stN_st0, bRm);
8957 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8958 }
8959 }
8960 else
8961 {
8962 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
8963 {
8964 case 0: return FNIEMOP_CALL_1(iemOp_fadd_m64r, bRm);
8965 case 1: return FNIEMOP_CALL_1(iemOp_fmul_m64r, bRm);
8966 case 2: return FNIEMOP_CALL_1(iemOp_fcom_m64r, bRm);
8967 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m64r, bRm);
8968 case 4: return FNIEMOP_CALL_1(iemOp_fsub_m64r, bRm);
8969 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m64r, bRm);
8970 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m64r, bRm);
8971 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m64r, bRm);
8972 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8973 }
8974 }
8975}
8976
8977
/** Opcode 0xdd !11/0.
 * FLD m64r - push a 64-bit real from memory (converted to 80-bit) onto the
 * FPU stack; signals stack overflow if ST7 is occupied.
 * @sa iemOp_fld_m32r */
FNIEMOP_DEF_1(iemOp_fld_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m64r, "fld m64r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val, r64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r64_to_r80, pFpuRes, pr64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9009
9010
/** Opcode 0xdd !11/1.  (Header previously said !11/0; the EscF5 dispatcher
 * routes reg==1 here.)
 * FISTTP m64i - store ST0 as a 64-bit integer with truncation, then pop. */
FNIEMOP_DEF_1(iemOp_fisttp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m64i, "fisttp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* ST0 empty: store integer indefinite if the invalid-op exception is masked. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9045
9046
/** Opcode 0xdd !11/2.  (Header previously said !11/0; the EscF5 dispatcher
 * routes reg==2 here.)
 * FST m64r - store ST0 as a 64-bit real to memory (no pop). */
FNIEMOP_DEF_1(iemOp_fst_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m64r, "fst m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* ST0 empty: store negative QNaN if the invalid-op exception is masked. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9081
9082
9083
9084
/** Opcode 0xdd !11/3.  (Header previously said !11/0; the EscF5 dispatcher
 * routes reg==3 here.)
 * FSTP m64r - store ST0 as a 64-bit real to memory, then pop. */
FNIEMOP_DEF_1(iemOp_fstp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m64r, "fstp m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* ST0 empty: store negative QNaN if the invalid-op exception is masked. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9119
9120
/** Opcode 0xdd !11/4.  (Header previously said !11/0; the EscF5 dispatcher
 * routes reg==4 here.)
 * FRSTOR m94/108byte - restore the full FPU state from memory; deferred to
 * the C implementation. */
FNIEMOP_DEF_1(iemOp_frstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC(frstor, "frstor m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize,  0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_frstor, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
9138
9139
/** Opcode 0xdd !11/6.  (Header previously said !11/0; the EscF5 dispatcher
 * routes reg==6 here.)
 * FNSAVE m94/108byte - save the full FPU state to memory (no-wait form);
 * deferred to the C implementation. */
FNIEMOP_DEF_1(iemOp_fnsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnsave, "fnsave m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize,  0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fnsave, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;

}
9158
/** Opcode 0xdd !11/7.  (Header previously said !11/0; the EscF5 dispatcher
 * routes reg==7 here.)
 * FNSTSW m16 - store the FPU status word to a 16-bit memory operand. */
FNIEMOP_DEF_1(iemOp_fnstsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstsw_m16, "fnstsw m16");

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
    IEM_MC_ADVANCE_RIP();

/** @todo Debug / drop a hint to the verifier that things may differ
 * from REM. Seen 0x4020 (iem) vs 0x4000 (rem) at 0008:801c6b88 booting
 * NT4SP1. (X86_FSW_PE) */
    IEM_MC_END();
    return VINF_SUCCESS;
}
9183
9184
/** Opcode 0xdd 11/0.
 * FFREE ST(i) - mark ST(i) as empty without popping. */
FNIEMOP_DEF_1(iemOp_ffree_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffree_stN, "ffree stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C1, C2 and C3 are documented as undefined, we leave them
             unmodified. */

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9206
9207
/** Opcode 0xdd 11/1.
 * FST ST(i) - copy ST0 into ST(i); signals underflow on ST(i) if ST0 is
 * empty. */
FNIEMOP_DEF_1(iemOp_fst_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_st0_stN, "fst st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        /* Wrap the ST0 value as a result with a zero FSW so the normal
           result-store path handles the copy. */
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
    IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9232
9233
/** Opcode 0xdd 11/3.
 * FUCOM ST(0),ST(i) - unordered compare, FSW only, no pop. */
FNIEMOP_DEF_1(iemOp_fucom_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucom_st0_stN, "fucom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fucom_r80_by_r80);
}
9240
9241
/** Opcode 0xdd 11/4.
 * FUCOMP ST(0),ST(i) - unordered compare, FSW only, pops ST0. */
FNIEMOP_DEF_1(iemOp_fucomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomp_st0_stN, "fucomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fucom_r80_by_r80);
}
9248
9249
9250/**
9251 * @opcode 0xdd
9252 */
9253FNIEMOP_DEF(iemOp_EscF5)
9254{
9255 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9256 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdd & 0x7);
9257 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9258 {
9259 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
9260 {
9261 case 0: return FNIEMOP_CALL_1(iemOp_ffree_stN, bRm);
9262 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, intel behavior is that of XCHG ST(i). */
9263 case 2: return FNIEMOP_CALL_1(iemOp_fst_stN, bRm);
9264 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm);
9265 case 4: return FNIEMOP_CALL_1(iemOp_fucom_stN_st0,bRm);
9266 case 5: return FNIEMOP_CALL_1(iemOp_fucomp_stN, bRm);
9267 case 6: return IEMOP_RAISE_INVALID_OPCODE();
9268 case 7: return IEMOP_RAISE_INVALID_OPCODE();
9269 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9270 }
9271 }
9272 else
9273 {
9274 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
9275 {
9276 case 0: return FNIEMOP_CALL_1(iemOp_fld_m64r, bRm);
9277 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m64i, bRm);
9278 case 2: return FNIEMOP_CALL_1(iemOp_fst_m64r, bRm);
9279 case 3: return FNIEMOP_CALL_1(iemOp_fstp_m64r, bRm);
9280 case 4: return FNIEMOP_CALL_1(iemOp_frstor, bRm);
9281 case 5: return IEMOP_RAISE_INVALID_OPCODE();
9282 case 6: return FNIEMOP_CALL_1(iemOp_fnsave, bRm);
9283 case 7: return FNIEMOP_CALL_1(iemOp_fnstsw, bRm);
9284 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9285 }
9286 }
9287}
9288
9289
/** Opcode 0xde 11/0.
 * FADDP ST(i),ST(0) - add ST0 to ST(i), store in ST(i), pop (per the _pop
 * helper variant). */
FNIEMOP_DEF_1(iemOp_faddp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(faddp_stN_st0, "faddp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fadd_r80_by_r80);
}
9296
9297
/** Opcode 0xde 11/1 (FMULP STi,ST0).
 * ST(i) *= ST0, then pop.  (The EscF6 dispatcher routes reg=1 here; the
 * previous "11/0" comment was a copy-paste slip from faddp.) */
FNIEMOP_DEF_1(iemOp_fmulp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmulp_stN_st0, "fmulp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fmul_r80_by_r80);
}
9304
9305
/** Opcode 0xde 0xd9 (FCOMPP).
 * Compare ST0 with ST1 and pop twice. */
FNIEMOP_DEF(iemOp_fcompp)
{
    IEMOP_MNEMONIC(fcompp_st0_stN, "fcompp st0,stN");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fcom_r80_by_r80);
}
9312
9313
/** Opcode 0xde 11/4 (FSUBRP STi,ST0).
 * ST(i) = ST0 - ST(i) (reversed subtract), then pop. */
FNIEMOP_DEF_1(iemOp_fsubrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubrp_stN_st0, "fsubrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsubr_r80_by_r80);
}
9320
9321
/** Opcode 0xde 11/5 (FSUBP STi,ST0).
 * ST(i) -= ST0, then pop. */
FNIEMOP_DEF_1(iemOp_fsubp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubp_stN_st0, "fsubp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsub_r80_by_r80);
}
9328
9329
/** Opcode 0xde 11/6 (FDIVRP STi,ST0).
 * ST(i) = ST0 / ST(i) (reversed divide), then pop. */
FNIEMOP_DEF_1(iemOp_fdivrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivrp_stN_st0, "fdivrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdivr_r80_by_r80);
}
9336
9337
/** Opcode 0xde 11/7 (FDIVP STi,ST0).
 * ST(i) /= ST0, then pop. */
FNIEMOP_DEF_1(iemOp_fdivp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivp_stN_st0, "fdivp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdiv_r80_by_r80);
}
9344
9345
9346/**
9347 * Common worker for FPU instructions working on ST0 and an m16i, and storing
9348 * the result in ST0.
9349 *
9350 * @param pfnAImpl Pointer to the instruction implementation (assembly).
9351 */
9352FNIEMOP_DEF_2(iemOpHlpFpu_st0_m16i, uint8_t, bRm, PFNIEMAIMPLFPUI16, pfnAImpl)
9353{
9354 IEM_MC_BEGIN(3, 3);
9355 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
9356 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
9357 IEM_MC_LOCAL(int16_t, i16Val2);
9358 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
9359 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
9360 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);
9361
9362 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9363 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9364
9365 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9366 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9367 IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9368
9369 IEM_MC_PREPARE_FPU_USAGE();
9370 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
9371 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi16Val2);
9372 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
9373 IEM_MC_ELSE()
9374 IEM_MC_FPU_STACK_UNDERFLOW(0);
9375 IEM_MC_ENDIF();
9376 IEM_MC_ADVANCE_RIP();
9377
9378 IEM_MC_END();
9379 return VINF_SUCCESS;
9380}
9381
9382
/** Opcode 0xde !11/0 (FIADD m16i).
 * ST0 += (signed 16-bit integer at m16i). */
FNIEMOP_DEF_1(iemOp_fiadd_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m16i, "fiadd m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fiadd_r80_by_i16);
}
9389
9390
/** Opcode 0xde !11/1 (FIMUL m16i).
 * ST0 *= (signed 16-bit integer at m16i). */
FNIEMOP_DEF_1(iemOp_fimul_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m16i, "fimul m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fimul_r80_by_i16);
}
9397
9398
/** Opcode 0xde !11/2 (FICOM ST0,m16i).
 * Compares ST0 with a signed 16-bit memory integer; only FSW is updated
 * (no result stored, no pop). */
FNIEMOP_DEF_1(iemOp_ficom_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m16i, "ficom st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(int16_t,               i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Compare only when ST0 is valid; otherwise record underflow with the
       memory operand info (for FDP/FDS reporting). */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9431
9432
/** Opcode 0xde !11/3 (FICOMP ST0,m16i).
 * Same as FICOM m16i but pops the register stack afterwards (the _THEN_POP
 * macro variants below are the only difference to iemOp_ficom_m16i). */
FNIEMOP_DEF_1(iemOp_ficomp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m16i, "ficomp st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(int16_t,               i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9465
9466
/** Opcode 0xde !11/4 (FISUB m16i).
 * ST0 -= (signed 16-bit integer at m16i). */
FNIEMOP_DEF_1(iemOp_fisub_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m16i, "fisub m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisub_r80_by_i16);
}
9473
9474
/** Opcode 0xde !11/5 (FISUBR m16i).
 * ST0 = m16i - ST0 (reversed subtract). */
FNIEMOP_DEF_1(iemOp_fisubr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m16i, "fisubr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisubr_r80_by_i16);
}
9481
9482
/** Opcode 0xde !11/6 (FIDIV m16i).
 * ST0 /= (signed 16-bit integer at m16i). */
FNIEMOP_DEF_1(iemOp_fidiv_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m16i, "fidiv m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidiv_r80_by_i16);
}
9489
9490
/** Opcode 0xde !11/7 (FIDIVR m16i).
 * ST0 = m16i / ST0 (reversed divide). */
FNIEMOP_DEF_1(iemOp_fidivr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m16i, "fidivr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidivr_r80_by_i16);
}
9497
9498
9499/**
9500 * @opcode 0xde
9501 */
9502FNIEMOP_DEF(iemOp_EscF6)
9503{
9504 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9505 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xde & 0x7);
9506 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9507 {
9508 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
9509 {
9510 case 0: return FNIEMOP_CALL_1(iemOp_faddp_stN_st0, bRm);
9511 case 1: return FNIEMOP_CALL_1(iemOp_fmulp_stN_st0, bRm);
9512 case 2: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
9513 case 3: if (bRm == 0xd9)
9514 return FNIEMOP_CALL(iemOp_fcompp);
9515 return IEMOP_RAISE_INVALID_OPCODE();
9516 case 4: return FNIEMOP_CALL_1(iemOp_fsubrp_stN_st0, bRm);
9517 case 5: return FNIEMOP_CALL_1(iemOp_fsubp_stN_st0, bRm);
9518 case 6: return FNIEMOP_CALL_1(iemOp_fdivrp_stN_st0, bRm);
9519 case 7: return FNIEMOP_CALL_1(iemOp_fdivp_stN_st0, bRm);
9520 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9521 }
9522 }
9523 else
9524 {
9525 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
9526 {
9527 case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m16i, bRm);
9528 case 1: return FNIEMOP_CALL_1(iemOp_fimul_m16i, bRm);
9529 case 2: return FNIEMOP_CALL_1(iemOp_ficom_m16i, bRm);
9530 case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m16i, bRm);
9531 case 4: return FNIEMOP_CALL_1(iemOp_fisub_m16i, bRm);
9532 case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m16i, bRm);
9533 case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m16i, bRm);
9534 case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m16i, bRm);
9535 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9536 }
9537 }
9538}
9539
9540
/** Opcode 0xdf 11/0 (FFREEP STi).
 * Undocumented instruction, assumed to work like ffree + fincstp:
 * marks ST(i) empty in the tag word, then increments the stack top. */
FNIEMOP_DEF_1(iemOp_ffreep_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffreep_stN, "ffreep stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK); /* the ffree part */
    IEM_MC_FPU_STACK_INC_TOP();                     /* the implicit pop */
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9562
9563
/** Opcode 0xdf 0xe0 (FNSTSW AX).
 * Copies the FPU status word into AX without checking for pending
 * FPU exceptions (the "no-wait" form). */
FNIEMOP_DEF(iemOp_fnstsw_ax)
{
    IEMOP_MNEMONIC(fnstsw_ax, "fnstsw ax");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9580
9581
/** Opcode 0xdf 11/5 (FUCOMIP ST0,STi).
 * Unordered compare setting EFLAGS, then pop.
 * NOTE(review): this passes the same worker as FCOMIP
 * (iemAImpl_fcomi_r80_by_r80).  Architecturally FUCOMIP must not raise \#IA
 * for QNaN operands while FCOMIP does -- confirm the shared worker implements
 * (or is intended to approximate) the unordered semantics. */
FNIEMOP_DEF_1(iemOp_fucomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomip_st0_stN, "fucomip st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
}
9588
9589
/** Opcode 0xdf 11/6 (FCOMIP ST0,STi).
 * Ordered compare of ST0 with ST(i) setting ZF/PF/CF in EFLAGS, then pop. */
FNIEMOP_DEF_1(iemOp_fcomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomip_st0_stN, "fcomip st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
}
9596
9597
/** Opcode 0xdf !11/0 (FILD m16i).
 * Loads a signed 16-bit memory integer, converts it to R80 and pushes it
 * onto the FPU stack; pushes stack-overflow indefinite if ST7 is occupied. */
FNIEMOP_DEF_1(iemOp_fild_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m16i, "fild m16i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(int16_t,               i16Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val,  i16Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* A push requires the register that becomes the new top (ST7 relative
       to the current top) to be empty. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i16_to_r80, pFpuRes, pi16Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9629
9630
/** Opcode 0xdf !11/1 (FISTTP m16i, SSE3).
 * Stores ST0 to m16i with truncation (round toward zero regardless of RC),
 * then pops.  On empty ST0 writes the integer-indefinite value if the
 * invalid-operation exception is masked (FCW.IM). */
FNIEMOP_DEF_1(iemOp_fisttp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m16i, "fisttp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int16_t *,               pi16Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination writable up front so the store can be committed
       conditionally on the FSW outcome. */
    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9665
9666
/** Opcode 0xdf !11/2 (FIST m16i).
 * Stores ST0 to m16i using the current rounding mode, no pop.  On empty
 * ST0 writes the integer-indefinite value if FCW.IM is set. */
FNIEMOP_DEF_1(iemOp_fist_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m16i, "fist m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int16_t *,               pi16Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9701
9702
/** Opcode 0xdf !11/3 (FISTP m16i).
 * Same as FIST m16i but pops the stack afterwards (the _THEN_POP macro
 * variants are the only difference to iemOp_fist_m16i). */
FNIEMOP_DEF_1(iemOp_fistp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m16i, "fistp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int16_t *,               pi16Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9737
9738
/** Opcode 0xdf !11/4 (FBLD m80bcd) - decoder stub, not yet implemented. */
FNIEMOP_STUB_1(iemOp_fbld_m80d, uint8_t, bRm);
9741
9742
/** Opcode 0xdf !11/5 (FILD m64i).
 * Loads a signed 64-bit memory integer, converts it to R80 and pushes it
 * onto the FPU stack; pushes stack-overflow indefinite if ST7 is occupied. */
FNIEMOP_DEF_1(iemOp_fild_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m64i, "fild m64i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(int64_t,               i64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int64_t const *, pi64Val,  i64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I64(i64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* The register becoming the new top (ST7) must be empty for the push. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i64_to_r80, pFpuRes, pi64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9774
9775
/** Opcode 0xdf !11/6 (FBSTP m80bcd) - decoder stub, not yet implemented. */
FNIEMOP_STUB_1(iemOp_fbstp_m80d, uint8_t, bRm);
9778
9779
/** Opcode 0xdf !11/7 (FISTP m64i).
 * Stores ST0 to m64i using the current rounding mode, then pops.  On empty
 * ST0 writes the 64-bit integer-indefinite value if FCW.IM is set. */
FNIEMOP_DEF_1(iemOp_fistp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m64i, "fistp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int64_t *,               pi64Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination writable up front so the store can be committed
       conditionally on the FSW outcome. */
    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9814
9815
9816/**
9817 * @opcode 0xdf
9818 */
9819FNIEMOP_DEF(iemOp_EscF7)
9820{
9821 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9822 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9823 {
9824 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
9825 {
9826 case 0: return FNIEMOP_CALL_1(iemOp_ffreep_stN, bRm); /* ffree + pop afterwards, since forever according to AMD. */
9827 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, behaves like FXCH ST(i) on intel. */
9828 case 2: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
9829 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
9830 case 4: if (bRm == 0xe0)
9831 return FNIEMOP_CALL(iemOp_fnstsw_ax);
9832 return IEMOP_RAISE_INVALID_OPCODE();
9833 case 5: return FNIEMOP_CALL_1(iemOp_fucomip_st0_stN, bRm);
9834 case 6: return FNIEMOP_CALL_1(iemOp_fcomip_st0_stN, bRm);
9835 case 7: return IEMOP_RAISE_INVALID_OPCODE();
9836 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9837 }
9838 }
9839 else
9840 {
9841 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
9842 {
9843 case 0: return FNIEMOP_CALL_1(iemOp_fild_m16i, bRm);
9844 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m16i, bRm);
9845 case 2: return FNIEMOP_CALL_1(iemOp_fist_m16i, bRm);
9846 case 3: return FNIEMOP_CALL_1(iemOp_fistp_m16i, bRm);
9847 case 4: return FNIEMOP_CALL_1(iemOp_fbld_m80d, bRm);
9848 case 5: return FNIEMOP_CALL_1(iemOp_fild_m64i, bRm);
9849 case 6: return FNIEMOP_CALL_1(iemOp_fbstp_m80d, bRm);
9850 case 7: return FNIEMOP_CALL_1(iemOp_fistp_m64i, bRm);
9851 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9852 }
9853 }
9854}
9855
9856
9857/**
9858 * @opcode 0xe0
9859 */
9860FNIEMOP_DEF(iemOp_loopne_Jb)
9861{
9862 IEMOP_MNEMONIC(loopne_Jb, "loopne Jb");
9863 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
9864 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9865 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9866
9867 switch (pVCpu->iem.s.enmEffAddrMode)
9868 {
9869 case IEMMODE_16BIT:
9870 IEM_MC_BEGIN(0,0);
9871 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
9872 IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
9873 IEM_MC_REL_JMP_S8(i8Imm);
9874 } IEM_MC_ELSE() {
9875 IEM_MC_ADVANCE_RIP();
9876 } IEM_MC_ENDIF();
9877 IEM_MC_END();
9878 return VINF_SUCCESS;
9879
9880 case IEMMODE_32BIT:
9881 IEM_MC_BEGIN(0,0);
9882 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
9883 IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
9884 IEM_MC_REL_JMP_S8(i8Imm);
9885 } IEM_MC_ELSE() {
9886 IEM_MC_ADVANCE_RIP();
9887 } IEM_MC_ENDIF();
9888 IEM_MC_END();
9889 return VINF_SUCCESS;
9890
9891 case IEMMODE_64BIT:
9892 IEM_MC_BEGIN(0,0);
9893 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
9894 IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
9895 IEM_MC_REL_JMP_S8(i8Imm);
9896 } IEM_MC_ELSE() {
9897 IEM_MC_ADVANCE_RIP();
9898 } IEM_MC_ENDIF();
9899 IEM_MC_END();
9900 return VINF_SUCCESS;
9901
9902 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9903 }
9904}
9905
9906
9907/**
9908 * @opcode 0xe1
9909 */
9910FNIEMOP_DEF(iemOp_loope_Jb)
9911{
9912 IEMOP_MNEMONIC(loope_Jb, "loope Jb");
9913 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
9914 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9915 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9916
9917 switch (pVCpu->iem.s.enmEffAddrMode)
9918 {
9919 case IEMMODE_16BIT:
9920 IEM_MC_BEGIN(0,0);
9921 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
9922 IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
9923 IEM_MC_REL_JMP_S8(i8Imm);
9924 } IEM_MC_ELSE() {
9925 IEM_MC_ADVANCE_RIP();
9926 } IEM_MC_ENDIF();
9927 IEM_MC_END();
9928 return VINF_SUCCESS;
9929
9930 case IEMMODE_32BIT:
9931 IEM_MC_BEGIN(0,0);
9932 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
9933 IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
9934 IEM_MC_REL_JMP_S8(i8Imm);
9935 } IEM_MC_ELSE() {
9936 IEM_MC_ADVANCE_RIP();
9937 } IEM_MC_ENDIF();
9938 IEM_MC_END();
9939 return VINF_SUCCESS;
9940
9941 case IEMMODE_64BIT:
9942 IEM_MC_BEGIN(0,0);
9943 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
9944 IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
9945 IEM_MC_REL_JMP_S8(i8Imm);
9946 } IEM_MC_ELSE() {
9947 IEM_MC_ADVANCE_RIP();
9948 } IEM_MC_ENDIF();
9949 IEM_MC_END();
9950 return VINF_SUCCESS;
9951
9952 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9953 }
9954}
9955
9956
9957/**
9958 * @opcode 0xe2
9959 */
9960FNIEMOP_DEF(iemOp_loop_Jb)
9961{
9962 IEMOP_MNEMONIC(loop_Jb, "loop Jb");
9963 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
9964 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9965 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9966
9967 /** @todo Check out the #GP case if EIP < CS.Base or EIP > CS.Limit when
9968 * using the 32-bit operand size override. How can that be restarted? See
9969 * weird pseudo code in intel manual. */
9970 switch (pVCpu->iem.s.enmEffAddrMode)
9971 {
9972 case IEMMODE_16BIT:
9973 IEM_MC_BEGIN(0,0);
9974 if (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) != i8Imm)
9975 {
9976 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
9977 IEM_MC_IF_CX_IS_NZ() {
9978 IEM_MC_REL_JMP_S8(i8Imm);
9979 } IEM_MC_ELSE() {
9980 IEM_MC_ADVANCE_RIP();
9981 } IEM_MC_ENDIF();
9982 }
9983 else
9984 {
9985 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
9986 IEM_MC_ADVANCE_RIP();
9987 }
9988 IEM_MC_END();
9989 return VINF_SUCCESS;
9990
9991 case IEMMODE_32BIT:
9992 IEM_MC_BEGIN(0,0);
9993 if (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) != i8Imm)
9994 {
9995 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
9996 IEM_MC_IF_ECX_IS_NZ() {
9997 IEM_MC_REL_JMP_S8(i8Imm);
9998 } IEM_MC_ELSE() {
9999 IEM_MC_ADVANCE_RIP();
10000 } IEM_MC_ENDIF();
10001 }
10002 else
10003 {
10004 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
10005 IEM_MC_ADVANCE_RIP();
10006 }
10007 IEM_MC_END();
10008 return VINF_SUCCESS;
10009
10010 case IEMMODE_64BIT:
10011 IEM_MC_BEGIN(0,0);
10012 if (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) != i8Imm)
10013 {
10014 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
10015 IEM_MC_IF_RCX_IS_NZ() {
10016 IEM_MC_REL_JMP_S8(i8Imm);
10017 } IEM_MC_ELSE() {
10018 IEM_MC_ADVANCE_RIP();
10019 } IEM_MC_ENDIF();
10020 }
10021 else
10022 {
10023 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
10024 IEM_MC_ADVANCE_RIP();
10025 }
10026 IEM_MC_END();
10027 return VINF_SUCCESS;
10028
10029 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10030 }
10031}
10032
10033
10034/**
10035 * @opcode 0xe3
10036 */
10037FNIEMOP_DEF(iemOp_jecxz_Jb)
10038{
10039 IEMOP_MNEMONIC(jecxz_Jb, "jecxz Jb");
10040 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10041 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10042 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10043
10044 switch (pVCpu->iem.s.enmEffAddrMode)
10045 {
10046 case IEMMODE_16BIT:
10047 IEM_MC_BEGIN(0,0);
10048 IEM_MC_IF_CX_IS_NZ() {
10049 IEM_MC_ADVANCE_RIP();
10050 } IEM_MC_ELSE() {
10051 IEM_MC_REL_JMP_S8(i8Imm);
10052 } IEM_MC_ENDIF();
10053 IEM_MC_END();
10054 return VINF_SUCCESS;
10055
10056 case IEMMODE_32BIT:
10057 IEM_MC_BEGIN(0,0);
10058 IEM_MC_IF_ECX_IS_NZ() {
10059 IEM_MC_ADVANCE_RIP();
10060 } IEM_MC_ELSE() {
10061 IEM_MC_REL_JMP_S8(i8Imm);
10062 } IEM_MC_ENDIF();
10063 IEM_MC_END();
10064 return VINF_SUCCESS;
10065
10066 case IEMMODE_64BIT:
10067 IEM_MC_BEGIN(0,0);
10068 IEM_MC_IF_RCX_IS_NZ() {
10069 IEM_MC_ADVANCE_RIP();
10070 } IEM_MC_ELSE() {
10071 IEM_MC_REL_JMP_S8(i8Imm);
10072 } IEM_MC_ENDIF();
10073 IEM_MC_END();
10074 return VINF_SUCCESS;
10075
10076 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10077 }
10078}
10079
10080
/** Opcode 0xe4 (IN AL,imm8).
 * Reads one byte from the immediate port into AL; deferred to the C
 * implementation for permission checks and device access. */
FNIEMOP_DEF(iemOp_in_AL_Ib)
{
    IEMOP_MNEMONIC(in_AL_Ib, "in AL,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, 1);
}
10089
10090
/** Opcode 0xe5 (IN eAX,imm8).
 * Reads a word or dword (per operand size) from the immediate port. */
FNIEMOP_DEF(iemOp_in_eAX_Ib)
{
    IEMOP_MNEMONIC(in_eAX_Ib, "in eAX,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
10099
10100
/** Opcode 0xe6 (OUT imm8,AL).
 * Writes AL to the immediate port. */
FNIEMOP_DEF(iemOp_out_Ib_AL)
{
    IEMOP_MNEMONIC(out_Ib_AL, "out Ib,AL");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, 1);
}
10109
10110
/** Opcode 0xe7 (OUT imm8,eAX).
 * Writes AX or EAX (per operand size) to the immediate port. */
FNIEMOP_DEF(iemOp_out_Ib_eAX)
{
    IEMOP_MNEMONIC(out_Ib_eAX, "out Ib,eAX");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
10119
10120
10121/**
10122 * @opcode 0xe8
10123 */
10124FNIEMOP_DEF(iemOp_call_Jv)
10125{
10126 IEMOP_MNEMONIC(call_Jv, "call Jv");
10127 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10128 switch (pVCpu->iem.s.enmEffOpSize)
10129 {
10130 case IEMMODE_16BIT:
10131 {
10132 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
10133 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_16, (int16_t)u16Imm);
10134 }
10135
10136 case IEMMODE_32BIT:
10137 {
10138 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
10139 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_32, (int32_t)u32Imm);
10140 }
10141
10142 case IEMMODE_64BIT:
10143 {
10144 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
10145 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_64, u64Imm);
10146 }
10147
10148 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10149 }
10150}
10151
10152
10153/**
10154 * @opcode 0xe9
10155 */
10156FNIEMOP_DEF(iemOp_jmp_Jv)
10157{
10158 IEMOP_MNEMONIC(jmp_Jv, "jmp Jv");
10159 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10160 switch (pVCpu->iem.s.enmEffOpSize)
10161 {
10162 case IEMMODE_16BIT:
10163 {
10164 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
10165 IEM_MC_BEGIN(0, 0);
10166 IEM_MC_REL_JMP_S16(i16Imm);
10167 IEM_MC_END();
10168 return VINF_SUCCESS;
10169 }
10170
10171 case IEMMODE_64BIT:
10172 case IEMMODE_32BIT:
10173 {
10174 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
10175 IEM_MC_BEGIN(0, 0);
10176 IEM_MC_REL_JMP_S32(i32Imm);
10177 IEM_MC_END();
10178 return VINF_SUCCESS;
10179 }
10180
10181 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10182 }
10183}
10184
10185
10186/**
10187 * @opcode 0xea
10188 */
10189FNIEMOP_DEF(iemOp_jmp_Ap)
10190{
10191 IEMOP_MNEMONIC(jmp_Ap, "jmp Ap");
10192 IEMOP_HLP_NO_64BIT();
10193
10194 /* Decode the far pointer address and pass it on to the far call C implementation. */
10195 uint32_t offSeg;
10196 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
10197 IEM_OPCODE_GET_NEXT_U32(&offSeg);
10198 else
10199 IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
10200 uint16_t uSel; IEM_OPCODE_GET_NEXT_U16(&uSel);
10201 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10202 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_FarJmp, uSel, offSeg, pVCpu->iem.s.enmEffOpSize);
10203}
10204
10205
10206/**
10207 * @opcode 0xeb
10208 */
10209FNIEMOP_DEF(iemOp_jmp_Jb)
10210{
10211 IEMOP_MNEMONIC(jmp_Jb, "jmp Jb");
10212 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10213 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10214 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10215
10216 IEM_MC_BEGIN(0, 0);
10217 IEM_MC_REL_JMP_S8(i8Imm);
10218 IEM_MC_END();
10219 return VINF_SUCCESS;
10220}
10221
10222
/** Opcode 0xec (IN AL,DX).
 * Reads one byte from the port in DX into AL. */
FNIEMOP_DEF(iemOp_in_AL_DX)
{
    IEMOP_MNEMONIC(in_AL_DX, "in AL,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, 1);
}
10230
10231
/** Opcode 0xed (IN eAX,DX).
 * Reads a word or dword (per operand size) from the port in DX.
 * NOTE(review): the function name iemOp_eAX_DX drops the "in_" prefix used
 * by every sibling here (cf. iemOp_in_AL_DX); presumably it should be
 * iemOp_in_eAX_DX -- renaming would require touching the dispatch table. */
FNIEMOP_DEF(iemOp_eAX_DX)
{
    IEMOP_MNEMONIC(in_eAX_DX, "in eAX,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
10239
10240
/** Opcode 0xee (OUT DX,AL).
 * Writes AL to the port in DX. */
FNIEMOP_DEF(iemOp_out_DX_AL)
{
    IEMOP_MNEMONIC(out_DX_AL, "out DX,AL");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, 1);
}
10248
10249
/** Opcode 0xef (OUT DX,eAX).
 * Writes AX or EAX (per operand size) to the port in DX. */
FNIEMOP_DEF(iemOp_out_DX_eAX)
{
    IEMOP_MNEMONIC(out_DX_eAX, "out DX,eAX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
10257
10258
10259/**
10260 * @opcode 0xf0
10261 */
10262FNIEMOP_DEF(iemOp_lock)
10263{
10264 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("lock");
10265 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_LOCK;
10266
10267 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
10268 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
10269}
10270
10271
10272/**
10273 * @opcode 0xf1
10274 */
10275FNIEMOP_DEF(iemOp_int1)
10276{
10277 IEMOP_MNEMONIC(int1, "int1"); /* icebp */
10278 IEMOP_HLP_MIN_386(); /** @todo does not generate #UD on 286, or so they say... */
10279 /** @todo testcase! */
10280 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_DB, false /*fIsBpInstr*/);
10281}
10282
10283
10284/**
10285 * @opcode 0xf2
10286 */
10287FNIEMOP_DEF(iemOp_repne)
10288{
10289 /* This overrides any previous REPE prefix. */
10290 pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPZ;
10291 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repne");
10292 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPNZ;
10293
10294 /* For the 4 entry opcode tables, REPNZ overrides any previous
10295 REPZ and operand size prefixes. */
10296 pVCpu->iem.s.idxPrefix = 3;
10297
10298 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
10299 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
10300}
10301
10302
10303/**
10304 * @opcode 0xf3
10305 */
10306FNIEMOP_DEF(iemOp_repe)
10307{
10308 /* This overrides any previous REPNE prefix. */
10309 pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPNZ;
10310 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repe");
10311 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPZ;
10312
10313 /* For the 4 entry opcode tables, REPNZ overrides any previous
10314 REPNZ and operand size prefixes. */
10315 pVCpu->iem.s.idxPrefix = 2;
10316
10317 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
10318 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
10319}
10320
10321
10322/**
10323 * @opcode 0xf4
10324 */
FNIEMOP_DEF(iemOp_hlt)
{
    /* hlt - halts the CPU; privilege/VM checks are done by the C implementation. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_hlt);
}
10330
10331
10332/**
10333 * @opcode 0xf5
10334 */
FNIEMOP_DEF(iemOp_cmc)
{
    IEMOP_MNEMONIC(cmc, "cmc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    /* Complement (toggle) the carry flag; no other flags are touched. */
    IEM_MC_FLIP_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10345
10346
10347/**
10348 * Common implementation of 'inc/dec/not/neg Eb'.
10349 *
10350 * @param bRm The RM byte.
10351 * @param pImpl The instruction implementation.
10352 */
FNIEMOP_DEF_2(iemOpCommonUnaryEb, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
{
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint32_t *, pEFlags, 1);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        /* Pick the locked worker when a LOCK prefix is present (memory form only). */
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU8, pu8Dst, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
10390
10391
10392/**
10393 * Common implementation of 'inc/dec/not/neg Ev'.
10394 *
10395 * @param bRm The RM byte.
10396 * @param pImpl The instruction implementation.
10397 */
FNIEMOP_DEF_2(iemOpCommonUnaryEv, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
{
    /* Registers are handled by a common worker. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, pImpl, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);

    /* Memory we do here.  One case per effective operand size; each maps the
       destination read-write and dispatches to the normal or locked worker
       depending on the LOCK prefix. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 2);
            IEM_MC_ARG(uint16_t *, pu16Dst, 0);
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
            IEM_MC_FETCH_EFLAGS(EFlags);
            if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
            else
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU16, pu16Dst, pEFlags);

            IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
            IEM_MC_COMMIT_EFLAGS(EFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 2);
            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
            IEM_MC_FETCH_EFLAGS(EFlags);
            if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
            else
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU32, pu32Dst, pEFlags);

            IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
            IEM_MC_COMMIT_EFLAGS(EFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 2);
            IEM_MC_ARG(uint64_t *, pu64Dst, 0);
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
            IEM_MC_FETCH_EFLAGS(EFlags);
            if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
            else
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU64, pu64Dst, pEFlags);

            IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
            IEM_MC_COMMIT_EFLAGS(EFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10470
10471
10472/** Opcode 0xf6 /0. */
FNIEMOP_DEF_1(iemOp_grp3_test_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC(test_Eb_Ib, "test Eb,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/u8Imm, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, u8Src, 1);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        /* cbImm=1: one immediate byte follows the ModR/M encoding. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_ASSIGN(u8Src, u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* TEST only reads the destination, so a read-only mapping suffices. */
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_R);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
10518
10519
10520/** Opcode 0xf7 /0. */
FNIEMOP_DEF_1(iemOp_grp3_test_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(test_Ev_Iv, "test Ev,Iv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/u16Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/u32Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
                /* No clearing the high dword here - test doesn't write back the result. */
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                /* The immediate is 32-bit, sign-extended to 64 bits. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/u64Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access.  The cbImm argument to IEM_MC_CALC_RM_EFF_ADDR is the
           number of immediate bytes still to be fetched after the ModR/M bytes. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                /* TEST only reads the destination, so a read-only mapping suffices. */
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                /* 32-bit immediate sign-extended to 64 bits. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
10659
10660
10661/** Opcode 0xf6 /4, /5, /6 and /7. */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEb, uint8_t, bRm, PFNIEMAIMPLMULDIVU8, pfnU8)
{
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_ARG(uint16_t *, pu16AX, 0);
        IEM_MC_ARG(uint8_t, u8Value, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(int32_t, rc);

        IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        /* Non-zero rc from the worker means division error -> raise #DE. */
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *, pu16AX, 0);
        IEM_MC_ARG(uint8_t, u8Value, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(int32_t, rc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        /* Non-zero rc from the worker means division error -> raise #DE. */
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
10712
10713
10714/** Opcode 0xf7 /4, /5, /6 and /7. */
10715FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEv, uint8_t, bRm, PCIEMOPMULDIVSIZES, pImpl)
10716{
10717 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
10718
10719 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10720 {
10721 /* register access */
10722 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10723 switch (pVCpu->iem.s.enmEffOpSize)
10724 {
10725 case IEMMODE_16BIT:
10726 {
10727 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10728 IEM_MC_BEGIN(4, 1);
10729 IEM_MC_ARG(uint16_t *, pu16AX, 0);
10730 IEM_MC_ARG(uint16_t *, pu16DX, 1);
10731 IEM_MC_ARG(uint16_t, u16Value, 2);
10732 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10733 IEM_MC_LOCAL(int32_t, rc);
10734
10735 IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10736 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
10737 IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
10738 IEM_MC_REF_EFLAGS(pEFlags);
10739 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
10740 IEM_MC_IF_LOCAL_IS_Z(rc) {
10741 IEM_MC_ADVANCE_RIP();
10742 } IEM_MC_ELSE() {
10743 IEM_MC_RAISE_DIVIDE_ERROR();
10744 } IEM_MC_ENDIF();
10745
10746 IEM_MC_END();
10747 return VINF_SUCCESS;
10748 }
10749
10750 case IEMMODE_32BIT:
10751 {
10752 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10753 IEM_MC_BEGIN(4, 1);
10754 IEM_MC_ARG(uint32_t *, pu32AX, 0);
10755 IEM_MC_ARG(uint32_t *, pu32DX, 1);
10756 IEM_MC_ARG(uint32_t, u32Value, 2);
10757 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10758 IEM_MC_LOCAL(int32_t, rc);
10759
10760 IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10761 IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
10762 IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
10763 IEM_MC_REF_EFLAGS(pEFlags);
10764 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
10765 IEM_MC_IF_LOCAL_IS_Z(rc) {
10766 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
10767 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
10768 IEM_MC_ADVANCE_RIP();
10769 } IEM_MC_ELSE() {
10770 IEM_MC_RAISE_DIVIDE_ERROR();
10771 } IEM_MC_ENDIF();
10772
10773 IEM_MC_END();
10774 return VINF_SUCCESS;
10775 }
10776
10777 case IEMMODE_64BIT:
10778 {
10779 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10780 IEM_MC_BEGIN(4, 1);
10781 IEM_MC_ARG(uint64_t *, pu64AX, 0);
10782 IEM_MC_ARG(uint64_t *, pu64DX, 1);
10783 IEM_MC_ARG(uint64_t, u64Value, 2);
10784 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10785 IEM_MC_LOCAL(int32_t, rc);
10786
10787 IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10788 IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
10789 IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
10790 IEM_MC_REF_EFLAGS(pEFlags);
10791 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
10792 IEM_MC_IF_LOCAL_IS_Z(rc) {
10793 IEM_MC_ADVANCE_RIP();
10794 } IEM_MC_ELSE() {
10795 IEM_MC_RAISE_DIVIDE_ERROR();
10796 } IEM_MC_ENDIF();
10797
10798 IEM_MC_END();
10799 return VINF_SUCCESS;
10800 }
10801
10802 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10803 }
10804 }
10805 else
10806 {
10807 /* memory access. */
10808 switch (pVCpu->iem.s.enmEffOpSize)
10809 {
10810 case IEMMODE_16BIT:
10811 {
10812 IEM_MC_BEGIN(4, 2);
10813 IEM_MC_ARG(uint16_t *, pu16AX, 0);
10814 IEM_MC_ARG(uint16_t *, pu16DX, 1);
10815 IEM_MC_ARG(uint16_t, u16Value, 2);
10816 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10817 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10818 IEM_MC_LOCAL(int32_t, rc);
10819
10820 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10821 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10822 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10823 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
10824 IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
10825 IEM_MC_REF_EFLAGS(pEFlags);
10826 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
10827 IEM_MC_IF_LOCAL_IS_Z(rc) {
10828 IEM_MC_ADVANCE_RIP();
10829 } IEM_MC_ELSE() {
10830 IEM_MC_RAISE_DIVIDE_ERROR();
10831 } IEM_MC_ENDIF();
10832
10833 IEM_MC_END();
10834 return VINF_SUCCESS;
10835 }
10836
10837 case IEMMODE_32BIT:
10838 {
10839 IEM_MC_BEGIN(4, 2);
10840 IEM_MC_ARG(uint32_t *, pu32AX, 0);
10841 IEM_MC_ARG(uint32_t *, pu32DX, 1);
10842 IEM_MC_ARG(uint32_t, u32Value, 2);
10843 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10844 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10845 IEM_MC_LOCAL(int32_t, rc);
10846
10847 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10848 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10849 IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10850 IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
10851 IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
10852 IEM_MC_REF_EFLAGS(pEFlags);
10853 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
10854 IEM_MC_IF_LOCAL_IS_Z(rc) {
10855 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
10856 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
10857 IEM_MC_ADVANCE_RIP();
10858 } IEM_MC_ELSE() {
10859 IEM_MC_RAISE_DIVIDE_ERROR();
10860 } IEM_MC_ENDIF();
10861
10862 IEM_MC_END();
10863 return VINF_SUCCESS;
10864 }
10865
10866 case IEMMODE_64BIT:
10867 {
10868 IEM_MC_BEGIN(4, 2);
10869 IEM_MC_ARG(uint64_t *, pu64AX, 0);
10870 IEM_MC_ARG(uint64_t *, pu64DX, 1);
10871 IEM_MC_ARG(uint64_t, u64Value, 2);
10872 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10873 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10874 IEM_MC_LOCAL(int32_t, rc);
10875
10876 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10877 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10878 IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10879 IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
10880 IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
10881 IEM_MC_REF_EFLAGS(pEFlags);
10882 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
10883 IEM_MC_IF_LOCAL_IS_Z(rc) {
10884 IEM_MC_ADVANCE_RIP();
10885 } IEM_MC_ELSE() {
10886 IEM_MC_RAISE_DIVIDE_ERROR();
10887 } IEM_MC_ENDIF();
10888
10889 IEM_MC_END();
10890 return VINF_SUCCESS;
10891 }
10892
10893 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10894 }
10895 }
10896}
10897
10898/**
10899 * @opcode 0xf6
10900 */
FNIEMOP_DEF(iemOp_Grp3_Eb)
{
    /* Dispatch on the ModR/M reg field: /0=test, /1=invalid, /2=not, /3=neg,
       /4=mul, /5=imul, /6=div, /7=idiv. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
        case 1:
/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2:
            IEMOP_MNEMONIC(not_Eb, "not Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_not);
        case 3:
            IEMOP_MNEMONIC(neg_Eb, "neg Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_neg);
        case 4:
            IEMOP_MNEMONIC(mul_Eb, "mul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_mul_u8);
        case 5:
            IEMOP_MNEMONIC(imul_Eb, "imul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_imul_u8);
        case 6:
            IEMOP_MNEMONIC(div_Eb, "div Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_div_u8);
        case 7:
            IEMOP_MNEMONIC(idiv_Eb, "idiv Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_idiv_u8);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10936
10937
10938/**
10939 * @opcode 0xf7
10940 */
FNIEMOP_DEF(iemOp_Grp3_Ev)
{
    /* Dispatch on the ModR/M reg field: /0=test, /1=invalid, /2=not, /3=neg,
       /4=mul, /5=imul, /6=div, /7=idiv. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
        case 1:
/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2:
            IEMOP_MNEMONIC(not_Ev, "not Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_not);
        case 3:
            IEMOP_MNEMONIC(neg_Ev, "neg Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_neg);
        case 4:
            IEMOP_MNEMONIC(mul_Ev, "mul Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_mul);
        case 5:
            IEMOP_MNEMONIC(imul_Ev, "imul Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_imul);
        case 6:
            IEMOP_MNEMONIC(div_Ev, "div Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_div);
        case 7:
            IEMOP_MNEMONIC(idiv_Ev, "idiv Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_idiv);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10976
10977
10978/**
10979 * @opcode 0xf8
10980 */
FNIEMOP_DEF(iemOp_clc)
{
    IEMOP_MNEMONIC(clc, "clc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    /* Clear the carry flag; no other flags are touched. */
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10991
10992
10993/**
10994 * @opcode 0xf9
10995 */
FNIEMOP_DEF(iemOp_stc)
{
    IEMOP_MNEMONIC(stc, "stc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    /* Set the carry flag; no other flags are touched. */
    IEM_MC_SET_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
11006
11007
11008/**
11009 * @opcode 0xfa
11010 */
FNIEMOP_DEF(iemOp_cli)
{
    IEMOP_MNEMONIC(cli, "cli");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* IOPL/CPL/VME checks are done by the C implementation. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cli);
}
11017
11018
/**
 * @opcode 0xfb
 */
FNIEMOP_DEF(iemOp_sti)
{
    IEMOP_MNEMONIC(sti, "sti");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* IOPL/CPL/VME checks and the interrupt shadow are handled by the C implementation. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sti);
}
11025
11026
11027/**
11028 * @opcode 0xfc
11029 */
FNIEMOP_DEF(iemOp_cld)
{
    IEMOP_MNEMONIC(cld, "cld");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    /* Clear the direction flag; string ops will increment their pointers. */
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
11040
11041
11042/**
11043 * @opcode 0xfd
11044 */
FNIEMOP_DEF(iemOp_std)
{
    IEMOP_MNEMONIC(std, "std");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    /* Set the direction flag; string ops will decrement their pointers. */
    IEM_MC_SET_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
11055
11056
11057/**
11058 * @opcode 0xfe
11059 */
FNIEMOP_DEF(iemOp_Grp4)
{
    /* Group 4: only /0 (inc Eb) and /1 (dec Eb) are defined; /2../7 are #UD. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            IEMOP_MNEMONIC(inc_Eb, "inc Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_inc);
        case 1:
            IEMOP_MNEMONIC(dec_Eb, "dec Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_dec);
        default:
            IEMOP_MNEMONIC(grp4_ud, "grp4-ud");
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
11076
11077
11078/**
11079 * Opcode 0xff /2.
11080 * @param bRm The RM byte.
11081 */
FNIEMOP_DEF_1(iemOp_Grp5_calln_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(calln_Ev, "calln Ev");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* The new RIP is taken from a register. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from a memory location. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
11162
11163typedef IEM_CIMPL_DECL_TYPE_3(FNIEMCIMPLFARBRANCH, uint16_t, uSel, uint64_t, offSeg, IEMMODE, enmOpSize);
11164
11165FNIEMOP_DEF_2(iemOpHlp_Grp5_far_Ep, uint8_t, bRm, FNIEMCIMPLFARBRANCH *, pfnCImpl)
11166{
11167 /* Registers? How?? */
11168 if (RT_LIKELY((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)))
11169 { /* likely */ }
11170 else
11171 return IEMOP_RAISE_INVALID_OPCODE(); /* callf eax is not legal */
11172
11173 /* Far pointer loaded from memory. */
11174 switch (pVCpu->iem.s.enmEffOpSize)
11175 {
11176 case IEMMODE_16BIT:
11177 IEM_MC_BEGIN(3, 1);
11178 IEM_MC_ARG(uint16_t, u16Sel, 0);
11179 IEM_MC_ARG(uint16_t, offSeg, 1);
11180 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
11181 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11182 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11183 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11184 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11185 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2);
11186 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
11187 IEM_MC_END();
11188 return VINF_SUCCESS;
11189
11190 case IEMMODE_64BIT:
11191 /** @todo testcase: AMD does not seem to believe in the case (see bs-cpu-xcpt-1)
11192 * and will apparently ignore REX.W, at least for the jmp far qword [rsp]
11193 * and call far qword [rsp] encodings. */
11194 if (!IEM_IS_GUEST_CPU_AMD(pVCpu))
11195 {
11196 IEM_MC_BEGIN(3, 1);
11197 IEM_MC_ARG(uint16_t, u16Sel, 0);
11198 IEM_MC_ARG(uint64_t, offSeg, 1);
11199 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
11200 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11201 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11202 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11203 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11204 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 8);
11205 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
11206 IEM_MC_END();
11207 return VINF_SUCCESS;
11208 }
11209 /* AMD falls thru. */
11210 /* fall thru */
11211
11212 case IEMMODE_32BIT:
11213 IEM_MC_BEGIN(3, 1);
11214 IEM_MC_ARG(uint16_t, u16Sel, 0);
11215 IEM_MC_ARG(uint32_t, offSeg, 1);
11216 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_32BIT, 2);
11217 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11218 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11219 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11220 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11221 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4);
11222 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
11223 IEM_MC_END();
11224 return VINF_SUCCESS;
11225
11226 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11227 }
11228}
11229
11230
11231/**
11232 * Opcode 0xff /3.
11233 * @param bRm The RM byte.
11234 */
FNIEMOP_DEF_1(iemOp_Grp5_callf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC(callf_Ep, "callf Ep");
    /* Shares the far-pointer-from-memory worker with jmpf (0xff /5). */
    return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_callf);
}
11240
11241
11242/**
11243 * Opcode 0xff /4.
11244 * @param bRm The RM byte.
11245 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpn_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(jmpn_Ev, "jmpn Ev");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* The new RIP is taken from a register. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_SET_RIP_U16(u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_SET_RIP_U32(u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_SET_RIP_U64(u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from a memory location. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U16(u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U32(u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U64(u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
11326
11327
11328/**
11329 * Opcode 0xff /5.
11330 * @param bRm The RM byte.
11331 */
11332FNIEMOP_DEF_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm)
11333{
11334 IEMOP_MNEMONIC(jmpf_Ep, "jmpf Ep");
11335 return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_FarJmp);
11336}
11337
11338
11339/**
11340 * Opcode 0xff /6.
11341 * @param bRm The RM byte.
11342 */
11343FNIEMOP_DEF_1(iemOp_Grp5_push_Ev, uint8_t, bRm)
11344{
11345 IEMOP_MNEMONIC(push_Ev, "push Ev");
11346
11347 /* Registers are handled by a common worker. */
11348 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
11349 return FNIEMOP_CALL_1(iemOpCommonPushGReg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11350
11351 /* Memory we do here. */
11352 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
11353 switch (pVCpu->iem.s.enmEffOpSize)
11354 {
11355 case IEMMODE_16BIT:
11356 IEM_MC_BEGIN(0, 2);
11357 IEM_MC_LOCAL(uint16_t, u16Src);
11358 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11359 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11360 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11361 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11362 IEM_MC_PUSH_U16(u16Src);
11363 IEM_MC_ADVANCE_RIP();
11364 IEM_MC_END();
11365 return VINF_SUCCESS;
11366
11367 case IEMMODE_32BIT:
11368 IEM_MC_BEGIN(0, 2);
11369 IEM_MC_LOCAL(uint32_t, u32Src);
11370 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11371 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11372 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11373 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11374 IEM_MC_PUSH_U32(u32Src);
11375 IEM_MC_ADVANCE_RIP();
11376 IEM_MC_END();
11377 return VINF_SUCCESS;
11378
11379 case IEMMODE_64BIT:
11380 IEM_MC_BEGIN(0, 2);
11381 IEM_MC_LOCAL(uint64_t, u64Src);
11382 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11383 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11384 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11385 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11386 IEM_MC_PUSH_U64(u64Src);
11387 IEM_MC_ADVANCE_RIP();
11388 IEM_MC_END();
11389 return VINF_SUCCESS;
11390
11391 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11392 }
11393}
11394
11395
11396/**
11397 * @opcode 0xff
11398 */
11399FNIEMOP_DEF(iemOp_Grp5)
11400{
11401 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11402 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
11403 {
11404 case 0:
11405 IEMOP_MNEMONIC(inc_Ev, "inc Ev");
11406 return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_inc);
11407 case 1:
11408 IEMOP_MNEMONIC(dec_Ev, "dec Ev");
11409 return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_dec);
11410 case 2:
11411 return FNIEMOP_CALL_1(iemOp_Grp5_calln_Ev, bRm);
11412 case 3:
11413 return FNIEMOP_CALL_1(iemOp_Grp5_callf_Ep, bRm);
11414 case 4:
11415 return FNIEMOP_CALL_1(iemOp_Grp5_jmpn_Ev, bRm);
11416 case 5:
11417 return FNIEMOP_CALL_1(iemOp_Grp5_jmpf_Ep, bRm);
11418 case 6:
11419 return FNIEMOP_CALL_1(iemOp_Grp5_push_Ev, bRm);
11420 case 7:
11421 IEMOP_MNEMONIC(grp5_ud, "grp5-ud");
11422 return IEMOP_RAISE_INVALID_OPCODE();
11423 }
11424 AssertFailedReturn(VERR_IEM_IPE_3);
11425}
11426
11427
11428
/**
 * The one-byte opcode decoder function table, indexed by the opcode byte
 * (0x00..0xff).  Forward declared near the top of this file so the decoder
 * core can dispatch into it.
 */
const PFNIEMOP g_apfnOneByteMap[256] =
{
 /* 0x00 */ iemOp_add_Eb_Gb, iemOp_add_Ev_Gv, iemOp_add_Gb_Eb, iemOp_add_Gv_Ev,
 /* 0x04 */ iemOp_add_Al_Ib, iemOp_add_eAX_Iz, iemOp_push_ES, iemOp_pop_ES,
 /* 0x08 */ iemOp_or_Eb_Gb, iemOp_or_Ev_Gv, iemOp_or_Gb_Eb, iemOp_or_Gv_Ev,
 /* 0x0c */ iemOp_or_Al_Ib, iemOp_or_eAX_Iz, iemOp_push_CS, iemOp_2byteEscape,
 /* 0x10 */ iemOp_adc_Eb_Gb, iemOp_adc_Ev_Gv, iemOp_adc_Gb_Eb, iemOp_adc_Gv_Ev,
 /* 0x14 */ iemOp_adc_Al_Ib, iemOp_adc_eAX_Iz, iemOp_push_SS, iemOp_pop_SS,
 /* 0x18 */ iemOp_sbb_Eb_Gb, iemOp_sbb_Ev_Gv, iemOp_sbb_Gb_Eb, iemOp_sbb_Gv_Ev,
 /* 0x1c */ iemOp_sbb_Al_Ib, iemOp_sbb_eAX_Iz, iemOp_push_DS, iemOp_pop_DS,
 /* 0x20 */ iemOp_and_Eb_Gb, iemOp_and_Ev_Gv, iemOp_and_Gb_Eb, iemOp_and_Gv_Ev,
 /* 0x24 */ iemOp_and_Al_Ib, iemOp_and_eAX_Iz, iemOp_seg_ES, iemOp_daa,
 /* 0x28 */ iemOp_sub_Eb_Gb, iemOp_sub_Ev_Gv, iemOp_sub_Gb_Eb, iemOp_sub_Gv_Ev,
 /* 0x2c */ iemOp_sub_Al_Ib, iemOp_sub_eAX_Iz, iemOp_seg_CS, iemOp_das,
 /* 0x30 */ iemOp_xor_Eb_Gb, iemOp_xor_Ev_Gv, iemOp_xor_Gb_Eb, iemOp_xor_Gv_Ev,
 /* 0x34 */ iemOp_xor_Al_Ib, iemOp_xor_eAX_Iz, iemOp_seg_SS, iemOp_aaa,
 /* 0x38 */ iemOp_cmp_Eb_Gb, iemOp_cmp_Ev_Gv, iemOp_cmp_Gb_Eb, iemOp_cmp_Gv_Ev,
 /* 0x3c */ iemOp_cmp_Al_Ib, iemOp_cmp_eAX_Iz, iemOp_seg_DS, iemOp_aas,
 /* 0x40 */ iemOp_inc_eAX, iemOp_inc_eCX, iemOp_inc_eDX, iemOp_inc_eBX,
 /* 0x44 */ iemOp_inc_eSP, iemOp_inc_eBP, iemOp_inc_eSI, iemOp_inc_eDI,
 /* 0x48 */ iemOp_dec_eAX, iemOp_dec_eCX, iemOp_dec_eDX, iemOp_dec_eBX,
 /* 0x4c */ iemOp_dec_eSP, iemOp_dec_eBP, iemOp_dec_eSI, iemOp_dec_eDI,
 /* 0x50 */ iemOp_push_eAX, iemOp_push_eCX, iemOp_push_eDX, iemOp_push_eBX,
 /* 0x54 */ iemOp_push_eSP, iemOp_push_eBP, iemOp_push_eSI, iemOp_push_eDI,
 /* 0x58 */ iemOp_pop_eAX, iemOp_pop_eCX, iemOp_pop_eDX, iemOp_pop_eBX,
 /* 0x5c */ iemOp_pop_eSP, iemOp_pop_eBP, iemOp_pop_eSI, iemOp_pop_eDI,
 /* 0x60 */ iemOp_pusha, iemOp_popa__mvex, iemOp_bound_Gv_Ma__evex, iemOp_arpl_Ew_Gw_movsx_Gv_Ev,
 /* 0x64 */ iemOp_seg_FS, iemOp_seg_GS, iemOp_op_size, iemOp_addr_size,
 /* 0x68 */ iemOp_push_Iz, iemOp_imul_Gv_Ev_Iz, iemOp_push_Ib, iemOp_imul_Gv_Ev_Ib,
 /* 0x6c */ iemOp_insb_Yb_DX, iemOp_inswd_Yv_DX, iemOp_outsb_Yb_DX, iemOp_outswd_Yv_DX,
 /* 0x70 */ iemOp_jo_Jb, iemOp_jno_Jb, iemOp_jc_Jb, iemOp_jnc_Jb,
 /* 0x74 */ iemOp_je_Jb, iemOp_jne_Jb, iemOp_jbe_Jb, iemOp_jnbe_Jb,
 /* 0x78 */ iemOp_js_Jb, iemOp_jns_Jb, iemOp_jp_Jb, iemOp_jnp_Jb,
 /* 0x7c */ iemOp_jl_Jb, iemOp_jnl_Jb, iemOp_jle_Jb, iemOp_jnle_Jb,
 /* 0x80 */ iemOp_Grp1_Eb_Ib_80, iemOp_Grp1_Ev_Iz, iemOp_Grp1_Eb_Ib_82, iemOp_Grp1_Ev_Ib,
 /* 0x84 */ iemOp_test_Eb_Gb, iemOp_test_Ev_Gv, iemOp_xchg_Eb_Gb, iemOp_xchg_Ev_Gv,
 /* 0x88 */ iemOp_mov_Eb_Gb, iemOp_mov_Ev_Gv, iemOp_mov_Gb_Eb, iemOp_mov_Gv_Ev,
 /* 0x8c */ iemOp_mov_Ev_Sw, iemOp_lea_Gv_M, iemOp_mov_Sw_Ev, iemOp_Grp1A__xop,
 /* 0x90 */ iemOp_nop, iemOp_xchg_eCX_eAX, iemOp_xchg_eDX_eAX, iemOp_xchg_eBX_eAX,
 /* 0x94 */ iemOp_xchg_eSP_eAX, iemOp_xchg_eBP_eAX, iemOp_xchg_eSI_eAX, iemOp_xchg_eDI_eAX,
 /* 0x98 */ iemOp_cbw, iemOp_cwd, iemOp_call_Ap, iemOp_wait,
 /* 0x9c */ iemOp_pushf_Fv, iemOp_popf_Fv, iemOp_sahf, iemOp_lahf,
 /* 0xa0 */ iemOp_mov_AL_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL, iemOp_mov_Ov_rAX,
 /* 0xa4 */ iemOp_movsb_Xb_Yb, iemOp_movswd_Xv_Yv, iemOp_cmpsb_Xb_Yb, iemOp_cmpswd_Xv_Yv,
 /* 0xa8 */ iemOp_test_AL_Ib, iemOp_test_eAX_Iz, iemOp_stosb_Yb_AL, iemOp_stoswd_Yv_eAX,
 /* 0xac */ iemOp_lodsb_AL_Xb, iemOp_lodswd_eAX_Xv, iemOp_scasb_AL_Xb, iemOp_scaswd_eAX_Xv,
 /* 0xb0 */ iemOp_mov_AL_Ib, iemOp_CL_Ib, iemOp_DL_Ib, iemOp_BL_Ib,
 /* 0xb4 */ iemOp_mov_AH_Ib, iemOp_CH_Ib, iemOp_DH_Ib, iemOp_BH_Ib,
 /* 0xb8 */ iemOp_eAX_Iv, iemOp_eCX_Iv, iemOp_eDX_Iv, iemOp_eBX_Iv,
 /* 0xbc */ iemOp_eSP_Iv, iemOp_eBP_Iv, iemOp_eSI_Iv, iemOp_eDI_Iv,
 /* 0xc0 */ iemOp_Grp2_Eb_Ib, iemOp_Grp2_Ev_Ib, iemOp_retn_Iw, iemOp_retn,
 /* 0xc4 */ iemOp_les_Gv_Mp__vex2, iemOp_lds_Gv_Mp__vex3, iemOp_Grp11_Eb_Ib, iemOp_Grp11_Ev_Iz,
 /* 0xc8 */ iemOp_enter_Iw_Ib, iemOp_leave, iemOp_retf_Iw, iemOp_retf,
 /* 0xcc */ iemOp_int3, iemOp_int_Ib, iemOp_into, iemOp_iret,
 /* 0xd0 */ iemOp_Grp2_Eb_1, iemOp_Grp2_Ev_1, iemOp_Grp2_Eb_CL, iemOp_Grp2_Ev_CL,
 /* 0xd4 */ iemOp_aam_Ib, iemOp_aad_Ib, iemOp_salc, iemOp_xlat,
 /* 0xd8 */ iemOp_EscF0, iemOp_EscF1, iemOp_EscF2, iemOp_EscF3,
 /* 0xdc */ iemOp_EscF4, iemOp_EscF5, iemOp_EscF6, iemOp_EscF7,
 /* 0xe0 */ iemOp_loopne_Jb, iemOp_loope_Jb, iemOp_loop_Jb, iemOp_jecxz_Jb,
 /* 0xe4 */ iemOp_in_AL_Ib, iemOp_in_eAX_Ib, iemOp_out_Ib_AL, iemOp_out_Ib_eAX,
 /* 0xe8 */ iemOp_call_Jv, iemOp_jmp_Jv, iemOp_jmp_Ap, iemOp_jmp_Jb,
 /* 0xec */ iemOp_in_AL_DX, iemOp_eAX_DX, iemOp_out_DX_AL, iemOp_out_DX_eAX,
 /* 0xf0 */ iemOp_lock, iemOp_int1, iemOp_repne, iemOp_repe,
 /* 0xf4 */ iemOp_hlt, iemOp_cmc, iemOp_Grp3_Eb, iemOp_Grp3_Ev,
 /* 0xf8 */ iemOp_clc, iemOp_stc, iemOp_cli, iemOp_sti,
 /* 0xfc */ iemOp_cld, iemOp_std, iemOp_Grp4, iemOp_Grp5,
};
11496
11497
11498/** @} */
11499
Note: See TracBrowser for help on using the repository browser.

© 2025 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette